diff --git a/dataproc-v1-py.tar.gz b/dataproc-v1-py.tar.gz new file mode 100644 index 00000000..978cc2e1 Binary files /dev/null and b/dataproc-v1-py.tar.gz differ diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py index ba982e58..1207dfc7 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -231,9 +231,9 @@ async def create_autoscaling_policy( from google.cloud import dataproc_v1 - def sample_create_autoscaling_policy(): + async def sample_create_autoscaling_policy(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) policy = dataproc_v1.AutoscalingPolicy() @@ -247,7 +247,7 @@ def sample_create_autoscaling_policy(): ) # Make the request - response = client.create_autoscaling_policy(request=request) + response = await client.create_autoscaling_policy(request=request) # Handle the response print(response) @@ -353,14 +353,13 @@ async def update_autoscaling_policy( Disabled check for update_mask, because all updates will be full replacements. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_update_autoscaling_policy(): + async def sample_update_autoscaling_policy(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) policy = dataproc_v1.AutoscalingPolicy() @@ -373,7 +372,7 @@ def sample_update_autoscaling_policy(): ) # Make the request - response = client.update_autoscaling_policy(request=request) + response = await client.update_autoscaling_policy(request=request) # Handle the response print(response) @@ -470,9 +469,9 @@ async def get_autoscaling_policy( from google.cloud import dataproc_v1 - def sample_get_autoscaling_policy(): + async def sample_get_autoscaling_policy(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetAutoscalingPolicyRequest( @@ -480,7 +479,7 @@ def sample_get_autoscaling_policy(): ) # Make the request - response = client.get_autoscaling_policy(request=request) + response = await client.get_autoscaling_policy(request=request) # Handle the response print(response) @@ -587,9 +586,9 @@ async def list_autoscaling_policies( from google.cloud import dataproc_v1 - def sample_list_autoscaling_policies(): + async def sample_list_autoscaling_policies(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListAutoscalingPoliciesRequest( @@ -600,7 +599,7 @@ def sample_list_autoscaling_policies(): page_result = client.list_autoscaling_policies(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -715,14 +714,13 @@ async def 
delete_autoscaling_policy( delete an autoscaling policy that is in use by one or more clusters. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_delete_autoscaling_policy(): + async def sample_delete_autoscaling_policy(): # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteAutoscalingPolicyRequest( @@ -730,7 +728,7 @@ def sample_delete_autoscaling_policy(): ) # Make the request - client.delete_autoscaling_policy(request=request) + await client.delete_autoscaling_policy(request=request) Args: request (Union[google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest, dict]): diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py index e52f792c..b0c7a096 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -426,6 +426,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_autoscaling_policy( @@ -570,7 +571,6 @@ def update_autoscaling_policy( Disabled check for update_mask, because all updates will be full replacements. - .. code-block:: python from google.cloud import dataproc_v1 @@ -906,7 +906,6 @@ def delete_autoscaling_policy( delete an autoscaling policy that is in use by one or more clusters. - .. code-block:: python from google.cloud import dataproc_v1 diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py index 4a5a9c4f..e2bf3b59 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py @@ -55,6 +55,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -81,10 +82,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -106,6 +103,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -118,6 +120,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. 
Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -244,5 +251,9 @@ def delete_autoscaling_policy( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("AutoscalingPolicyServiceTransport",) diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py index dc3800d9..2962dd25 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py @@ -60,6 +60,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -155,6 +156,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -381,5 +383,9 @@ def delete_autoscaling_policy( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("AutoscalingPolicyServiceGrpcTransport",) diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py index 38bf8786..87267eae 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py @@ -105,6 +105,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -200,6 +201,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/dataproc_v1/services/batch_controller/async_client.py b/google/cloud/dataproc_v1/services/batch_controller/async_client.py index eebdf5de..7b5b5f43 100644 --- a/google/cloud/dataproc_v1/services/batch_controller/async_client.py +++ b/google/cloud/dataproc_v1/services/batch_controller/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -223,14 +223,13 @@ async def create_batch( r"""Creates a batch workload that executes asynchronously. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_create_batch(): + async def sample_create_batch(): # Create a client - client = dataproc_v1.BatchControllerClient() + client = dataproc_v1.BatchControllerAsyncClient() # Initialize request argument(s) batch = dataproc_v1.Batch() @@ -246,7 +245,7 @@ def sample_create_batch(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -361,9 +360,9 @@ async def get_batch( from google.cloud import dataproc_v1 - def sample_get_batch(): + async def sample_get_batch(): # Create a client - client = dataproc_v1.BatchControllerClient() + client = dataproc_v1.BatchControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetBatchRequest( @@ -371,7 +370,7 @@ def sample_get_batch(): ) # Make the request - response = client.get_batch(request=request) + response = await client.get_batch(request=request) # Handle the response print(response) @@ -456,9 +455,9 @@ async def list_batches( from google.cloud import dataproc_v1 - def sample_list_batches(): + async def sample_list_batches(): # Create a client - client = dataproc_v1.BatchControllerClient() + client = dataproc_v1.BatchControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListBatchesRequest( @@ -469,7 +468,7 @@ def sample_list_batches(): page_result = client.list_batches(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -561,14 +560,13 @@ async def delete_batch( terminal state, the delete fails and the response returns ``FAILED_PRECONDITION``. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_delete_batch(): + async def sample_delete_batch(): # Create a client - client = dataproc_v1.BatchControllerClient() + client = dataproc_v1.BatchControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteBatchRequest( @@ -576,7 +574,7 @@ def sample_delete_batch(): ) # Make the request - client.delete_batch(request=request) + await client.delete_batch(request=request) Args: request (Union[google.cloud.dataproc_v1.types.DeleteBatchRequest, dict]): diff --git a/google/cloud/dataproc_v1/services/batch_controller/client.py b/google/cloud/dataproc_v1/services/batch_controller/client.py index 2b71958d..e1f162e3 100644 --- a/google/cloud/dataproc_v1/services/batch_controller/client.py +++ b/google/cloud/dataproc_v1/services/batch_controller/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -431,6 +431,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_batch( @@ -447,7 +448,6 @@ def create_batch( r"""Creates a batch workload that executes asynchronously. - .. code-block:: python from google.cloud import dataproc_v1 @@ -785,7 +785,6 @@ def delete_batch( terminal state, the delete fails and the response returns ``FAILED_PRECONDITION``. - .. 
code-block:: python from google.cloud import dataproc_v1 diff --git a/google/cloud/dataproc_v1/services/batch_controller/transports/base.py b/google/cloud/dataproc_v1/services/batch_controller/transports/base.py index dd9dde35..6eae6f77 100644 --- a/google/cloud/dataproc_v1/services/batch_controller/transports/base.py +++ b/google/cloud/dataproc_v1/services/batch_controller/transports/base.py @@ -57,6 +57,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -83,10 +84,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -108,6 +105,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -120,6 +122,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -193,5 +200,9 @@ def delete_batch( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("BatchControllerTransport",) diff --git a/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py index 2ae7edfe..d76c0ae0 100644 --- a/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
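The new ``api_audience`` option enters through ``ClientOptions`` on each service client and is forwarded to the transport, where credentials that expose ``with_gdch_audience()`` are rebound to the requested audience (falling back to the host when no audience is given). A minimal caller-side sketch, assuming a google-api-core release whose ``ClientOptions`` accepts ``api_audience``; the endpoint and audience strings are placeholders, not real deployments:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import dataproc_v1

    # Placeholder endpoint/audience for a private (e.g. GDC-H) deployment.
    options = ClientOptions(
        api_endpoint="dataproc.example.goog:443",
        api_audience="https://dataproc.example.goog",
    )

    # The client forwards api_audience to the transport, which calls
    # credentials.with_gdch_audience(...) only when the resolved
    # credentials actually define that method.
    client = dataproc_v1.BatchControllerClient(client_options=options)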
@@ -158,6 +159,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -352,5 +354,9 @@ def delete_batch(self) -> Callable[[batches.DeleteBatchRequest], empty_pb2.Empty def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("BatchControllerGrpcTransport",) diff --git a/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py index e120908d..5536f3a0 100644 --- a/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -203,6 +204,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/google/cloud/dataproc_v1/services/cluster_controller/async_client.py index 0d1876c7..5e2e6fb8 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/async_client.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -225,14 +225,13 @@ async def create_cluster( be `ClusterOperationMetadata `__. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_create_cluster(): + async def sample_create_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) cluster = dataproc_v1.Cluster() @@ -250,7 +249,7 @@ def sample_create_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -330,6 +329,17 @@ def sample_create_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -370,14 +380,13 @@ async def update_cluster( [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] state or an error is returned. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_update_cluster(): + async def sample_update_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) cluster = dataproc_v1.Cluster() @@ -396,7 +405,7 @@ def sample_update_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -553,6 +562,18 @@ def sample_update_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -586,9 +607,9 @@ async def stop_cluster( from google.cloud import dataproc_v1 - def sample_stop_cluster(): + async def sample_stop_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.StopClusterRequest( @@ -602,7 +623,7 @@ def sample_stop_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -635,6 +656,18 @@ def sample_stop_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -668,9 +701,9 @@ async def start_cluster( from google.cloud import dataproc_v1 - def sample_start_cluster(): + async def sample_start_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.StartClusterRequest( @@ -684,7 +717,7 @@ def sample_start_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -717,6 +750,18 @@ def sample_start_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -752,14 +797,13 @@ async def delete_cluster( be `ClusterOperationMetadata `__. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_delete_cluster(): + async def sample_delete_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteClusterRequest( @@ -773,7 +817,7 @@ def sample_delete_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -864,6 +908,18 @@ def sample_delete_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -897,14 +953,13 @@ async def get_cluster( r"""Gets the resource representation for a cluster in a project. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_get_cluster(): + async def sample_get_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetClusterRequest( @@ -914,7 +969,7 @@ def sample_get_cluster(): ) # Make the request - response = client.get_cluster(request=request) + response = await client.get_cluster(request=request) # Handle the response print(response) @@ -996,6 +1051,18 @@ def sample_get_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -1021,14 +1088,13 @@ async def list_clusters( r"""Lists all regions/{region}/clusters in a project alphabetically. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_list_clusters(): + async def sample_list_clusters(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListClustersRequest( @@ -1040,7 +1106,7 @@ def sample_list_clusters(): page_result = client.list_clusters(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1145,6 +1211,17 @@ def sample_list_clusters(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -1185,14 +1262,13 @@ async def diagnose_cluster( contains `DiagnoseClusterResults `__. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_diagnose_cluster(): + async def sample_diagnose_cluster(): # Create a client - client = dataproc_v1.ClusterControllerClient() + client = dataproc_v1.ClusterControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.DiagnoseClusterRequest( @@ -1206,7 +1282,7 @@ def sample_diagnose_cluster(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -1288,6 +1364,18 @@ def sample_diagnose_cluster(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = await rpc( request, diff --git a/google/cloud/dataproc_v1/services/cluster_controller/client.py b/google/cloud/dataproc_v1/services/cluster_controller/client.py index b7f031eb..39f66463 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -431,6 +431,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_cluster( @@ -449,7 +450,6 @@ def create_cluster( be `ClusterOperationMetadata `__. - .. code-block:: python from google.cloud import dataproc_v1 @@ -545,6 +545,17 @@ def sample_create_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -585,7 +596,6 @@ def update_cluster( [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] state or an error is returned. - .. code-block:: python from google.cloud import dataproc_v1 @@ -759,6 +769,18 @@ def sample_update_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, @@ -842,6 +864,18 @@ def sample_stop_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.stop_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. 
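# (The tuple appended above is serialized into the standard
# x-goog-request-params header, so these fields ride along with the
# call; see the routing_header sketch later in this diff.)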
response = rpc( request, @@ -925,6 +959,18 @@ def sample_start_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.start_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, @@ -960,7 +1006,6 @@ def delete_cluster( be `ClusterOperationMetadata `__. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1063,6 +1108,18 @@ def sample_delete_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, @@ -1096,7 +1153,6 @@ def get_cluster( r"""Gets the resource representation for a cluster in a project. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1184,6 +1240,18 @@ def sample_get_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. response = rpc( request, @@ -1209,7 +1277,6 @@ def list_clusters( r"""Lists all regions/{region}/clusters in a project alphabetically. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1322,6 +1389,17 @@ def sample_list_clusters(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_clusters] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -1362,7 +1440,6 @@ def diagnose_cluster( contains `DiagnoseClusterResults `__. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1456,6 +1533,18 @@ def sample_diagnose_cluster(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.diagnose_cluster] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + # Send the request. 
response = rpc( request, diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py index 2fa2ac05..3cc68102 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py @@ -56,6 +56,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -82,10 +83,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -107,6 +104,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -119,6 +121,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -308,5 +315,9 @@ def diagnose_cluster( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("ClusterControllerTransport",) diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py index a2c8d977..73522293 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py @@ -61,6 +61,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
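Every ClusterController RPC now attaches these fields as an explicit routing header. ``to_grpc_metadata()`` folds them into the standard ``x-goog-request-params`` metadata entry; a small sketch with invented identifiers, shown only to illustrate the shape of the result (the exact URL-encoding is handled by the library):

.. code-block:: python

    from google.api_core import gapic_v1

    # Invented identifiers, for illustration only.
    params = (
        ("project_id", "demo-project"),
        ("region", "us-central1"),
        ("cluster_name", "demo-cluster"),
    )
    metadata = (gapic_v1.routing_header.to_grpc_metadata(params),)
    print(metadata)
    # Expected shape:
    # (('x-goog-request-params',
    #   'project_id=demo-project&region=us-central1&cluster_name=demo-cluster'),)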
@@ -157,6 +158,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -475,5 +477,9 @@ def diagnose_cluster( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("ClusterControllerGrpcTransport",) diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py index 6ebeb2a3..182d7278 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py @@ -106,6 +106,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -202,6 +203,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/dataproc_v1/services/job_controller/async_client.py b/google/cloud/dataproc_v1/services/job_controller/async_client.py index 866e6ab9..eac60439 100644 --- a/google/cloud/dataproc_v1/services/job_controller/async_client.py +++ b/google/cloud/dataproc_v1/services/job_controller/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -219,9 +219,9 @@ async def submit_job( from google.cloud import dataproc_v1 - def sample_submit_job(): + async def sample_submit_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) job = dataproc_v1.Job() @@ -235,7 +235,7 @@ def sample_submit_job(): ) # Make the request - response = client.submit_job(request=request) + response = await client.submit_job(request=request) # Handle the response print(response) @@ -311,6 +311,17 @@ def sample_submit_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -339,9 +350,9 @@ async def submit_job_as_operation( from google.cloud import dataproc_v1 - def sample_submit_job_as_operation(): + async def sample_submit_job_as_operation(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) job = dataproc_v1.Job() @@ -359,7 +370,7 @@ def sample_submit_job_as_operation(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -440,6 +451,17 @@ def sample_submit_job_as_operation(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -473,14 +495,13 @@ async def get_job( r"""Gets the resource representation for a job in a project. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_get_job(): + async def sample_get_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetJobRequest( @@ -490,7 +511,7 @@ def sample_get_job(): ) # Make the request - response = client.get_job(request=request) + response = await client.get_job(request=request) # Handle the response print(response) @@ -569,6 +590,18 @@ def sample_get_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -597,9 +630,9 @@ async def list_jobs( from google.cloud import dataproc_v1 - def sample_list_jobs(): + async def sample_list_jobs(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListJobsRequest( @@ -611,7 +644,7 @@ def sample_list_jobs(): page_result = client.list_jobs(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -708,6 +741,17 @@ def sample_list_jobs(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -742,9 +786,9 @@ async def update_job( from google.cloud import dataproc_v1 - def sample_update_job(): + async def sample_update_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) job = dataproc_v1.Job() @@ -759,7 +803,7 @@ def sample_update_job(): ) # Make the request - response = client.update_job(request=request) + response = await client.update_job(request=request) # Handle the response print(response) @@ -797,6 +841,18 @@ def sample_update_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -825,14 +881,13 @@ async def cancel_job( or `regions/{region}/jobs.get `__. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_cancel_job(): + async def sample_cancel_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.CancelJobRequest( @@ -842,7 +897,7 @@ def sample_cancel_job(): ) # Make the request - response = client.cancel_job(request=request) + response = await client.cancel_job(request=request) # Handle the response print(response) @@ -920,6 +975,18 @@ def sample_cancel_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = await rpc( request, @@ -945,14 +1012,13 @@ async def delete_job( r"""Deletes the job from the project. If the job is active, the delete fails, and the response returns ``FAILED_PRECONDITION``. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_delete_job(): + async def sample_delete_job(): # Create a client - client = dataproc_v1.JobControllerClient() + client = dataproc_v1.JobControllerAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteJobRequest( @@ -962,7 +1028,7 @@ def sample_delete_job(): ) # Make the request - client.delete_job(request=request) + await client.delete_job(request=request) Args: request (Union[google.cloud.dataproc_v1.types.DeleteJobRequest, dict]): @@ -1031,6 +1097,18 @@ def sample_delete_job(): client_info=DEFAULT_CLIENT_INFO, ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. await rpc( request, diff --git a/google/cloud/dataproc_v1/services/job_controller/client.py b/google/cloud/dataproc_v1/services/job_controller/client.py index 62015b2b..45a6941e 100644 --- a/google/cloud/dataproc_v1/services/job_controller/client.py +++ b/google/cloud/dataproc_v1/services/job_controller/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -402,6 +402,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def submit_job( @@ -504,6 +505,17 @@ def sample_submit_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.submit_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -624,6 +636,17 @@ def sample_submit_job_as_operation(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.submit_job_as_operation] + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -657,7 +680,6 @@ def get_job( r"""Gets the resource representation for a job in a project. - .. code-block:: python from google.cloud import dataproc_v1 @@ -742,6 +764,18 @@ def sample_get_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = rpc( request, @@ -870,6 +904,17 @@ def sample_list_jobs(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_jobs] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + # Send the request. response = rpc( request, @@ -951,6 +996,18 @@ def sample_update_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = rpc( request, @@ -979,7 +1036,6 @@ def cancel_job( or `regions/{region}/jobs.get `__. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1063,6 +1119,18 @@ def sample_cancel_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.cancel_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. response = rpc( request, @@ -1088,7 +1156,6 @@ def delete_job( r"""Deletes the job from the project. If the job is active, the delete fails, and the response returns ``FAILED_PRECONDITION``. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1165,6 +1232,18 @@ def sample_delete_job(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_job] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + # Send the request. 
rpc( request, diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/base.py b/google/cloud/dataproc_v1/services/job_controller/transports/base.py index 52f16ec8..9e6d02b0 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/base.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/base.py @@ -57,6 +57,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -83,10 +84,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -108,6 +105,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -120,6 +122,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -293,5 +300,9 @@ def delete_job( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("JobControllerTransport",) diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py index 420b7956..e2bff3e0 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py @@ -61,6 +61,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
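The regenerated samples above are coroutines, so they need an event loop to run; the async clients also return async pagers, which is why the list samples switch to ``async for``. A minimal driver for one converted JobController sample, assuming application default credentials are configured; the project and region values are placeholders:

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1

    async def main():
        # Placeholder project/region; ADC is assumed to be available.
        client = dataproc_v1.JobControllerAsyncClient()
        request = dataproc_v1.ListJobsRequest(
            project_id="demo-project",
            region="us-central1",
        )
        # The async method returns an async pager, hence `async for`.
        page_result = await client.list_jobs(request=request)
        async for job in page_result:
            print(job.reference.job_id)

    asyncio.run(main())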
@@ -157,6 +158,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -424,5 +426,9 @@ def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty_pb2.Empty]: def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("JobControllerGrpcTransport",) diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py index e97072a9..c9454d3a 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py @@ -106,6 +106,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -202,6 +203,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py index b50c39f0..0e3c256e 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -236,9 +236,9 @@ async def create_workflow_template( from google.cloud import dataproc_v1 - def sample_create_workflow_template(): + async def sample_create_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) template = dataproc_v1.WorkflowTemplate() @@ -253,7 +253,7 @@ def sample_create_workflow_template(): ) # Make the request - response = client.create_workflow_template(request=request) + response = await client.create_workflow_template(request=request) # Handle the response print(response) @@ -364,14 +364,13 @@ async def get_workflow_template( Can retrieve previously instantiated template by specifying optional version parameter. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_get_workflow_template(): + async def sample_get_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.GetWorkflowTemplateRequest( @@ -379,7 +378,7 @@ def sample_get_workflow_template(): ) # Make the request - response = client.get_workflow_template(request=request) + response = await client.get_workflow_template(request=request) # Handle the response print(response) @@ -478,9 +477,7 @@ async def instantiate_workflow_template( ] = None, *, name: str = None, - parameters: Sequence[ - workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry - ] = None, + parameters: Mapping[str, str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -507,14 +504,13 @@ async def instantiate_workflow_template( [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty]. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_instantiate_workflow_template(): + async def sample_instantiate_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.InstantiateWorkflowTemplateRequest( @@ -526,7 +522,7 @@ def sample_instantiate_workflow_template(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -555,7 +551,7 @@ def sample_instantiate_workflow_template(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (:class:`Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]`): + parameters (:class:`Mapping[str, str]`): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 1000 @@ -690,14 +686,13 @@ async def instantiate_inline_workflow_template( [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty]. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_instantiate_inline_workflow_template(): + async def sample_instantiate_inline_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) template = dataproc_v1.WorkflowTemplate() @@ -716,7 +711,7 @@ def sample_instantiate_inline_workflow_template(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -850,14 +845,13 @@ async def update_workflow_template( template must contain version that matches the current server version. - .. 
code-block:: python from google.cloud import dataproc_v1 - def sample_update_workflow_template(): + async def sample_update_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) template = dataproc_v1.WorkflowTemplate() @@ -871,7 +865,7 @@ def sample_update_workflow_template(): ) # Make the request - response = client.update_workflow_template(request=request) + response = await client.update_workflow_template(request=request) # Handle the response print(response) @@ -966,14 +960,13 @@ async def list_workflow_templates( r"""Lists workflows that match the specified filter in the request. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_list_workflow_templates(): + async def sample_list_workflow_templates(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.ListWorkflowTemplatesRequest( @@ -984,7 +977,7 @@ def sample_list_workflow_templates(): page_result = client.list_workflow_templates(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1097,14 +1090,13 @@ async def delete_workflow_template( r"""Deletes a workflow template. It does not cancel in-progress workflows. - .. code-block:: python from google.cloud import dataproc_v1 - def sample_delete_workflow_template(): + async def sample_delete_workflow_template(): # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() # Initialize request argument(s) request = dataproc_v1.DeleteWorkflowTemplateRequest( @@ -1112,7 +1104,7 @@ def sample_delete_workflow_template(): ) # Make the request - client.delete_workflow_template(request=request) + await client.delete_workflow_template(request=request) Args: request (Union[google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest, dict]): diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/google/cloud/dataproc_v1/services/workflow_template_service/client.py index 30cc058f..b141ce37 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -453,6 +453,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_workflow_template( @@ -590,7 +591,6 @@ def get_workflow_template( Can retrieve previously instantiated template by specifying optional version parameter. - .. 
code-block:: python from google.cloud import dataproc_v1 @@ -693,9 +693,7 @@ def instantiate_workflow_template( ] = None, *, name: str = None, - parameters: Sequence[ - workflow_templates.InstantiateWorkflowTemplateRequest.ParametersEntry - ] = None, + parameters: Mapping[str, str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -722,7 +720,6 @@ def instantiate_workflow_template( [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty]. - .. code-block:: python from google.cloud import dataproc_v1 @@ -770,7 +767,7 @@ def sample_instantiate_workflow_template(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): + parameters (Mapping[str, str]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 1000 @@ -899,7 +896,6 @@ def instantiate_inline_workflow_template( [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty]. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1056,7 +1052,6 @@ def update_workflow_template( template must contain version that matches the current server version. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1163,7 +1158,6 @@ def list_workflow_templates( r"""Lists workflows that match the specified filter in the request. - .. code-block:: python from google.cloud import dataproc_v1 @@ -1283,7 +1277,6 @@ def delete_workflow_template( r"""Deletes a workflow template. It does not cancel in-progress workflows. - .. code-block:: python from google.cloud import dataproc_v1 diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py index f220cbc6..779f15b6 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py @@ -57,6 +57,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -83,10 +84,6 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -108,6 +105,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -120,6 +122,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -316,5 +323,9 @@ def delete_workflow_template( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("WorkflowTemplateServiceTransport",) diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py index 4d373025..758128b8 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -158,6 +159,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: @@ -505,5 +507,9 @@ def delete_workflow_template( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("WorkflowTemplateServiceGrpcTransport",) diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py index 139fe9db..686b55a8 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -203,6 +204,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/dataproc_v1/types/autoscaling_policies.py b/google/cloud/dataproc_v1/types/autoscaling_policies.py index b1685e6b..56e4c942 100644 --- a/google/cloud/dataproc_v1/types/autoscaling_policies.py +++ b/google/cloud/dataproc_v1/types/autoscaling_policies.py @@ -71,7 +71,7 @@ class AutoscalingPolicy(proto.Message): secondary_worker_config (google.cloud.dataproc_v1.types.InstanceGroupAutoscalingPolicyConfig): Optional. Describes how the autoscaler will operate for secondary workers. - labels (Sequence[google.cloud.dataproc_v1.types.AutoscalingPolicy.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this autoscaling policy. Label **keys** must contain 1 to 63 characters, and must conform to `RFC @@ -116,9 +116,13 @@ class AutoscalingPolicy(proto.Message): class BasicAutoscalingAlgorithm(proto.Message): r"""Basic algorithm for autoscaling. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: yarn_config (google.cloud.dataproc_v1.types.BasicYarnAutoscalingConfig): Required. YARN autoscaling configuration. + + This field is a member of `oneof`_ ``config``. cooldown_period (google.protobuf.duration_pb2.Duration): Optional. Duration between scaling events. 
A scaling period starts after the update operation from the previous event @@ -130,6 +134,7 @@ class BasicAutoscalingAlgorithm(proto.Message): yarn_config = proto.Field( proto.MESSAGE, number=1, + oneof="config", message="BasicYarnAutoscalingConfig", ) cooldown_period = proto.Field( diff --git a/google/cloud/dataproc_v1/types/batches.py b/google/cloud/dataproc_v1/types/batches.py index 761b3fb7..e014bff1 100644 --- a/google/cloud/dataproc_v1/types/batches.py +++ b/google/cloud/dataproc_v1/types/batches.py @@ -225,7 +225,7 @@ class Batch(proto.Message): creator (str): Output only. The email address of the user who created the batch. - labels (Sequence[google.cloud.dataproc_v1.types.Batch.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this batch. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. @@ -547,7 +547,7 @@ class SparkSqlBatch(proto.Message): query_file_uri (str): Required. The HCFS URI of the script that contains Spark SQL queries to execute. - query_variables (Sequence[google.cloud.dataproc_v1.types.SparkSqlBatch.QueryVariablesEntry]): + query_variables (Mapping[str, str]): Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: ``SET name="value";``). diff --git a/google/cloud/dataproc_v1/types/clusters.py b/google/cloud/dataproc_v1/types/clusters.py index 573d2302..cd201379 100644 --- a/google/cloud/dataproc_v1/types/clusters.py +++ b/google/cloud/dataproc_v1/types/clusters.py @@ -89,7 +89,7 @@ class Cluster(proto.Message): Note that Dataproc may set default values, and values may change when clusters are updated. Exactly one of config or virtualClusterConfig must be specified. - labels (Sequence[google.cloud.dataproc_v1.types.Cluster.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this cluster. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. @@ -327,20 +327,6 @@ class VirtualClusterConfig(proto.Message): buckets `__). **This field requires a Cloud Storage bucket name, not a ``gs://...`` URI to a Cloud Storage bucket.** - temp_bucket (str): - Optional. A Cloud Storage bucket used to store ephemeral - cluster and jobs data, such as Spark and MapReduce history - files. If you do not specify a temp bucket, Dataproc will - determine a Cloud Storage location (US, ASIA, or EU) for - your cluster's temp bucket according to the Compute Engine - zone where your cluster is deployed, and then create and - manage this project-level, per-location bucket. The default - bucket has a TTL of 90 days, but you can use any TTL (or - none) if you specify a bucket (see `Dataproc staging and - temp - buckets `__). - **This field requires a Cloud Storage bucket name, not a - ``gs://...`` URI to a Cloud Storage bucket.** kubernetes_cluster_config (google.cloud.dataproc_v1.types.KubernetesClusterConfig): Required. The configuration for running the Dataproc cluster on Kubernetes. @@ -355,10 +341,6 @@ class VirtualClusterConfig(proto.Message): proto.STRING, number=1, ) - temp_bucket = proto.Field( - proto.STRING, - number=2, - ) kubernetes_cluster_config = proto.Field( proto.MESSAGE, number=6, @@ -400,7 +382,7 @@ class EndpointConfig(proto.Message): r"""Endpoint config for this cluster Attributes: - http_ports (Sequence[google.cloud.dataproc_v1.types.EndpointConfig.HttpPortsEntry]): + http_ports (Mapping[str, str]): Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true. 
enable_http_port_access (bool): @@ -543,7 +525,7 @@ class GceClusterConfig(proto.Message): The Compute Engine tags to add to all instances (see `Tagging instances `__). - metadata (Sequence[google.cloud.dataproc_v1.types.GceClusterConfig.MetadataEntry]): + metadata (Mapping[str, str]): The Compute Engine metadata entries to add to all instances (see `Project and instance metadata `__). @@ -1203,7 +1185,7 @@ class IdentityConfig(proto.Message): based secure multi-tenancy user mappings. Attributes: - user_service_account_mapping (Sequence[google.cloud.dataproc_v1.types.IdentityConfig.UserServiceAccountMappingEntry]): + user_service_account_mapping (Mapping[str, str]): Required. Map of user to service account. """ @@ -1227,7 +1209,7 @@ class SoftwareConfig(proto.Message): "1.2.29"), or the `"preview" version `__. If unspecified, it defaults to the latest Debian version. - properties (Sequence[google.cloud.dataproc_v1.types.SoftwareConfig.PropertiesEntry]): + properties (Mapping[str, str]): Optional. The properties to set on daemon config files. Property keys are specified in ``prefix:property`` format, @@ -1356,9 +1338,9 @@ class ClusterMetrics(proto.Message): only. It may be changed before final release. Attributes: - hdfs_metrics (Sequence[google.cloud.dataproc_v1.types.ClusterMetrics.HdfsMetricsEntry]): + hdfs_metrics (Mapping[str, int]): The HDFS metrics. - yarn_metrics (Sequence[google.cloud.dataproc_v1.types.ClusterMetrics.YarnMetricsEntry]): + yarn_metrics (Mapping[str, int]): The YARN metrics. """ diff --git a/google/cloud/dataproc_v1/types/jobs.py b/google/cloud/dataproc_v1/types/jobs.py index a4161903..2e785931 100644 --- a/google/cloud/dataproc_v1/types/jobs.py +++ b/google/cloud/dataproc_v1/types/jobs.py @@ -54,7 +54,7 @@ class LoggingConfig(proto.Message): r"""The runtime logging config of the job. Attributes: - driver_log_levels (Sequence[google.cloud.dataproc_v1.types.LoggingConfig.DriverLogLevelsEntry]): + driver_log_levels (Mapping[str, google.cloud.dataproc_v1.types.LoggingConfig.Level]): The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: @@ -131,7 +131,7 @@ class HadoopJob(proto.Message): extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip. - properties (Sequence[google.cloud.dataproc_v1.types.HadoopJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include @@ -220,7 +220,7 @@ class SparkJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[google.cloud.dataproc_v1.types.SparkJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may @@ -300,7 +300,7 @@ class PySparkJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[google.cloud.dataproc_v1.types.PySparkJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc @@ -404,10 +404,10 @@ class HiveJob(proto.Message): Optional. 
Whether to continue executing queries if a query fails. The default value is ``false``. Setting to ``true`` can be useful when executing independent parallel queries. - script_variables (Sequence[google.cloud.dataproc_v1.types.HiveJob.ScriptVariablesEntry]): + script_variables (Mapping[str, str]): Optional. Mapping of query variable names to values (equivalent to the Hive command: ``SET name="value";``). - properties (Sequence[google.cloud.dataproc_v1.types.HiveJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties @@ -472,11 +472,11 @@ class SparkSqlJob(proto.Message): A list of queries. This field is a member of `oneof`_ ``queries``. - script_variables (Sequence[google.cloud.dataproc_v1.types.SparkSqlJob.ScriptVariablesEntry]): + script_variables (Mapping[str, str]): Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET ``name="value";``). - properties (Sequence[google.cloud.dataproc_v1.types.SparkSqlJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the @@ -546,10 +546,10 @@ class PigJob(proto.Message): Optional. Whether to continue executing queries if a query fails. The default value is ``false``. Setting to ``true`` can be useful when executing independent parallel queries. - script_variables (Sequence[google.cloud.dataproc_v1.types.PigJob.ScriptVariablesEntry]): + script_variables (Mapping[str, str]): Optional. Mapping of query variable names to values (equivalent to the Pig command: ``name=[value]``). - properties (Sequence[google.cloud.dataproc_v1.types.PigJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties @@ -623,7 +623,7 @@ class SparkRJob(proto.Message): extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. - properties (Sequence[google.cloud.dataproc_v1.types.SparkRJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc @@ -699,7 +699,7 @@ class PrestoJob(proto.Message): client_tags (Sequence[str]): Optional. Presto client tags to attach to this query - properties (Sequence[google.cloud.dataproc_v1.types.PrestoJob.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values. Used to set Presto `session properties `__ @@ -754,7 +754,7 @@ class JobPlacement(proto.Message): cluster_uuid (str): Output only. A cluster UUID generated by the Dataproc service when the job is submitted. - cluster_labels (Sequence[google.cloud.dataproc_v1.types.JobPlacement.ClusterLabelsEntry]): + cluster_labels (Mapping[str, str]): Optional. Cluster labels to identify a cluster where the job will be submitted. """ @@ -993,7 +993,7 @@ class Job(proto.Message): control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as ``driver_output_uri``. - labels (Sequence[google.cloud.dataproc_v1.types.Job.LabelsEntry]): + labels (Mapping[str, str]): Optional. 
The labels to associate with this job. Label **keys** must contain 1 to 63 characters, and must conform to `RFC 1035 `__. diff --git a/google/cloud/dataproc_v1/types/operations.py b/google/cloud/dataproc_v1/types/operations.py index 7ad81761..572f35f9 100644 --- a/google/cloud/dataproc_v1/types/operations.py +++ b/google/cloud/dataproc_v1/types/operations.py @@ -44,7 +44,7 @@ class BatchOperationMetadata(proto.Message): The operation type. description (str): Short description of the operation. - labels (Sequence[google.cloud.dataproc_v1.types.BatchOperationMetadata.LabelsEntry]): + labels (Mapping[str, str]): Labels associated with the operation. warnings (Sequence[str]): Warnings encountered during operation @@ -155,7 +155,7 @@ class ClusterOperationMetadata(proto.Message): Output only. The operation type. description (str): Output only. Short description of operation. - labels (Sequence[google.cloud.dataproc_v1.types.ClusterOperationMetadata.LabelsEntry]): + labels (Mapping[str, str]): Output only. Labels associated with the operation warnings (Sequence[str]): diff --git a/google/cloud/dataproc_v1/types/shared.py b/google/cloud/dataproc_v1/types/shared.py index d482d90d..e0fd8024 100644 --- a/google/cloud/dataproc_v1/types/shared.py +++ b/google/cloud/dataproc_v1/types/shared.py @@ -72,7 +72,7 @@ class RuntimeConfig(proto.Message): Optional. Optional custom container image for the job runtime environment. If not specified, a default container image will be used. - properties (Sequence[google.cloud.dataproc_v1.types.RuntimeConfig.PropertiesEntry]): + properties (Mapping[str, str]): Optional. A mapping of property names to values, which are used to configure workload execution. @@ -222,7 +222,7 @@ class RuntimeInfo(proto.Message): r"""Runtime information about workload execution. Attributes: - endpoints (Sequence[google.cloud.dataproc_v1.types.RuntimeInfo.EndpointsEntry]): + endpoints (Mapping[str, str]): Output only. Map of remote access endpoints (such as web interfaces and APIs) to their URIs. output_uri (str): @@ -325,14 +325,14 @@ class KubernetesSoftwareConfig(proto.Message): on Kubernetes. Attributes: - component_version (Sequence[google.cloud.dataproc_v1.types.KubernetesSoftwareConfig.ComponentVersionEntry]): + component_version (Mapping[str, str]): The components that should be installed in this Dataproc cluster. The key must be a string from the KubernetesComponent enumeration. The value is the version of the software to be installed. At least one entry must be specified. - properties (Sequence[google.cloud.dataproc_v1.types.KubernetesSoftwareConfig.PropertiesEntry]): + properties (Mapping[str, str]): The properties to set on daemon config files. Property keys are specified in ``prefix:property`` format, diff --git a/google/cloud/dataproc_v1/types/workflow_templates.py b/google/cloud/dataproc_v1/types/workflow_templates.py index b2cb76f1..17bff61c 100644 --- a/google/cloud/dataproc_v1/types/workflow_templates.py +++ b/google/cloud/dataproc_v1/types/workflow_templates.py @@ -84,7 +84,7 @@ class WorkflowTemplate(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time template was last updated. - labels (Sequence[google.cloud.dataproc_v1.types.WorkflowTemplate.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. @@ -228,7 +228,7 @@ class ManagedCluster(proto.Message): characters. 
config (google.cloud.dataproc_v1.types.ClusterConfig): Required. The cluster configuration. - labels (Sequence[google.cloud.dataproc_v1.types.ManagedCluster.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and @@ -270,7 +270,7 @@ class ClusterSelector(proto.Message): selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used. - cluster_labels (Sequence[google.cloud.dataproc_v1.types.ClusterSelector.ClusterLabelsEntry]): + cluster_labels (Mapping[str, str]): Required. The cluster labels. Cluster must have all labels to match. """ @@ -342,7 +342,7 @@ class OrderedJob(proto.Message): Optional. Job is a Presto job. This field is a member of `oneof`_ ``job_type``. - labels (Sequence[google.cloud.dataproc_v1.types.OrderedJob.LabelsEntry]): + labels (Mapping[str, str]): Optional. The labels to associate with this job. Label keys must be between 1 and 63 characters long, and @@ -626,7 +626,7 @@ class WorkflowMetadata(proto.Message): Output only. The workflow state. cluster_name (str): Output only. The name of the target cluster. - parameters (Sequence[google.cloud.dataproc_v1.types.WorkflowMetadata.ParametersEntry]): + parameters (Mapping[str, str]): Map from parameter names to values that were used for those parameters. start_time (google.protobuf.timestamp_pb2.Timestamp): @@ -915,7 +915,7 @@ class InstantiateWorkflowTemplateRequest(proto.Message): The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. - parameters (Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): + parameters (Mapping[str, str]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 1000 characters. 
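With the ``Mapping[str, str]`` signatures above, map-typed fields such as ``parameters`` and ``labels`` accept a plain ``dict`` instead of a sequence of generated ``*Entry`` messages. A minimal sketch against the sync client (the project, region, and template names are placeholders):

.. code-block:: python

    from google.cloud import dataproc_v1

    def sample_instantiate_with_parameters():
        # Create a client
        client = dataproc_v1.WorkflowTemplateServiceClient()

        # ``parameters`` is now a plain Mapping[str, str]; values are strings
        # and may not exceed 1000 characters.
        operation = client.instantiate_workflow_template(
            name="projects/my-project/regions/us-central1/workflowTemplates/my-template",
            parameters={"CLUSTER_NAME": "my-cluster", "NUM_WORKERS": "2"},
        )

        print("Waiting for operation to complete...")

        # The instantiate LRO resolves to Empty; errors surface here.
        operation.result()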
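The async samples above construct ``WorkflowTemplateServiceAsyncClient``, await unary calls, and consume pagers with ``async for``. A self-contained sketch that drives the client from ``asyncio`` (the parent is a placeholder; awaiting the paged call yields the async pager before iterating):

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1

    async def sample_list_workflow_templates_async():
        # Create an async client
        client = dataproc_v1.WorkflowTemplateServiceAsyncClient()

        request = dataproc_v1.ListWorkflowTemplatesRequest(
            parent="projects/my-project/regions/us-central1",
        )

        # The coroutine resolves to an async pager, consumed with ``async for``.
        page_result = await client.list_workflow_templates(request=request)
        async for response in page_result:
            print(response)

    asyncio.run(sample_list_workflow_templates_async())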
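``api_audience`` is threaded from ``client_options`` through every transport constructor above, and transports now expose a ``kind`` property. A sketch of supplying the option, assuming Application Default Credentials are available and a google-api-core version recent enough for ``ClientOptions`` to accept ``api_audience``:

.. code-block:: python

    from google.api_core.client_options import ClientOptions

    from google.cloud import dataproc_v1

    def sample_client_with_api_audience():
        # Override the audience used when minting self-signed JWTs
        # (or the GDC-H audience, for credentials that support it).
        options = ClientOptions(api_audience="https://dataproc.googleapis.com")

        client = dataproc_v1.WorkflowTemplateServiceClient(client_options=options)

        # The ``kind`` property added in this change reports "grpc"
        # for the gRPC transport.
        print(client.transport.kind)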
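``yarn_config`` on ``BasicAutoscalingAlgorithm`` now participates in the ``config`` oneof, as documented above, so assigning it marks that variant as set. A sketch of inspecting the populated variant through the wrapped protobuf message (``_pb`` is proto-plus's underlying message and semi-private, so treat this as illustrative only):

.. code-block:: python

    from google.cloud import dataproc_v1

    def sample_autoscaling_oneof():
        policy = dataproc_v1.AutoscalingPolicy()

        # Assigning yarn_config selects the ``config`` oneof variant.
        policy.basic_algorithm.yarn_config.scale_up_factor = 0.5
        policy.basic_algorithm.yarn_config.scale_down_factor = 0.5

        # WhichOneof lives on the underlying protobuf message.
        print(policy.basic_algorithm._pb.WhichOneof("config"))  # yarn_config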
diff --git a/samples/generated_samples/snippet_metadata_dataproc_v1.json b/samples/generated_samples/snippet_metadata_dataproc_v1.json index 09954e85..127808b7 100644 --- a/samples/generated_samples/snippet_metadata_dataproc_v1.json +++ b/samples/generated_samples/snippet_metadata_dataproc_v1.json @@ -1,16 +1,65 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.dataproc.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-dataproc" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.create_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "CreateAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "policy", + "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "create_autoscaling_policy" }, + "description": "Sample for CreateAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async", "segments": [ { @@ -43,18 +92,58 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.create_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "CreateAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "policy", + "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "create_autoscaling_policy" }, + "description": "Sample for CreateAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_sync", "segments": [ { @@ 
-87,19 +176,54 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.delete_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "DeleteAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_autoscaling_policy" }, + "description": "Sample for DeleteAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async", "segments": [ { @@ -130,18 +254,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.delete_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "DeleteAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_autoscaling_policy" }, + "description": "Sample for DeleteAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_sync", "segments": [ { @@ -172,19 +331,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.get_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", "service": { + "fullName": 
"google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "GetAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "get_autoscaling_policy" }, + "description": "Sample for GetAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async", "segments": [ { @@ -217,18 +412,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.get_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "GetAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "get_autoscaling_policy" }, + "description": "Sample for GetAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_sync", "segments": [ { @@ -261,19 +492,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.list_autoscaling_policies", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "ListAutoscalingPolicies" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesAsyncPager", + "shortName": "list_autoscaling_policies" }, + "description": "Sample for ListAutoscalingPolicies", "file": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async", "segments": [ { @@ -306,18 +573,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.list_autoscaling_policies", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "ListAutoscalingPolicies" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesPager", + "shortName": "list_autoscaling_policies" }, + "description": "Sample for ListAutoscalingPolicies", "file": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_sync", "segments": [ { @@ -350,19 +653,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", + "shortName": "AutoscalingPolicyServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.update_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "UpdateAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest" + }, + { + "name": "policy", + "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "update_autoscaling_policy" }, + "description": "Sample for UpdateAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async", "segments": [ { @@ -395,18 +734,54 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", + "shortName": "AutoscalingPolicyServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.update_autoscaling_policy", "method": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", "service": { + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", "shortName": "AutoscalingPolicyService" }, "shortName": "UpdateAutoscalingPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest" + }, + { + "name": "policy", + "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", + "shortName": "update_autoscaling_policy" }, + "description": "Sample for UpdateAutoscalingPolicy", "file": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_sync", "segments": [ { @@ -439,19 +814,63 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", + "shortName": "BatchControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.create_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.CreateBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "CreateBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateBatchRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "batch", + "type": "google.cloud.dataproc_v1.types.Batch" + }, + { + "name": "batch_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_batch" }, + "description": "Sample for CreateBatch", "file": "dataproc_v1_generated_batch_controller_create_batch_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_CreateBatch_async", "segments": [ { @@ -484,18 +903,62 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_create_batch_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerClient", + "shortName": "BatchControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerClient.create_batch", "method": { + "fullName": 
"google.cloud.dataproc.v1.BatchController.CreateBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "CreateBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateBatchRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "batch", + "type": "google.cloud.dataproc_v1.types.Batch" + }, + { + "name": "batch_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_batch" }, + "description": "Sample for CreateBatch", "file": "dataproc_v1_generated_batch_controller_create_batch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_CreateBatch_sync", "segments": [ { @@ -528,19 +991,54 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_create_batch_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", + "shortName": "BatchControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.delete_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "DeleteBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteBatchRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_batch" }, + "description": "Sample for DeleteBatch", "file": "dataproc_v1_generated_batch_controller_delete_batch_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_DeleteBatch_async", "segments": [ { @@ -571,18 +1069,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_delete_batch_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerClient", + "shortName": "BatchControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerClient.delete_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "DeleteBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteBatchRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_batch" }, + "description": "Sample for DeleteBatch", "file": "dataproc_v1_generated_batch_controller_delete_batch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_DeleteBatch_sync", "segments": [ { @@ -613,19 +1146,55 @@ "end": 
43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_delete_batch_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", + "shortName": "BatchControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.get_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "GetBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetBatchRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Batch", + "shortName": "get_batch" }, + "description": "Sample for GetBatch", "file": "dataproc_v1_generated_batch_controller_get_batch_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_GetBatch_async", "segments": [ { @@ -658,18 +1227,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_get_batch_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerClient", + "shortName": "BatchControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerClient.get_batch", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "GetBatch" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetBatchRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Batch", + "shortName": "get_batch" }, + "description": "Sample for GetBatch", "file": "dataproc_v1_generated_batch_controller_get_batch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_GetBatch_sync", "segments": [ { @@ -702,19 +1307,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_get_batch_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", + "shortName": "BatchControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.list_batches", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "ListBatches" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListBatchesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesAsyncPager", + "shortName": "list_batches" }, + "description": "Sample for ListBatches", "file": "dataproc_v1_generated_batch_controller_list_batches_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_ListBatches_async", "segments": [ { @@ -747,18 +1388,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_list_batches_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.BatchControllerClient", + "shortName": "BatchControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.BatchControllerClient.list_batches", "method": { + "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", "service": { + "fullName": "google.cloud.dataproc.v1.BatchController", "shortName": "BatchController" }, "shortName": "ListBatches" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListBatchesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesPager", + "shortName": "list_batches" }, + "description": "Sample for ListBatches", "file": "dataproc_v1_generated_batch_controller_list_batches_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_BatchController_ListBatches_sync", "segments": [ { @@ -791,19 +1468,63 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_batch_controller_list_batches_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.create_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "CreateCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.dataproc_v1.types.Cluster" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_cluster" }, + "description": "Sample for CreateCluster", "file": "dataproc_v1_generated_cluster_controller_create_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_CreateCluster_async", "segments": [ { @@ -836,18 +1557,62 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_create_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": 
"google.cloud.dataproc_v1.ClusterControllerClient.create_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "CreateCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.dataproc_v1.types.Cluster" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_cluster" }, + "description": "Sample for CreateCluster", "file": "dataproc_v1_generated_cluster_controller_create_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_CreateCluster_sync", "segments": [ { @@ -880,19 +1645,63 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_create_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.delete_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.DeleteCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "DeleteCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_cluster" }, + "description": "Sample for DeleteCluster", "file": "dataproc_v1_generated_cluster_controller_delete_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_DeleteCluster_async", "segments": [ { @@ -925,18 +1734,62 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_delete_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.delete_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.DeleteCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "DeleteCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_cluster" }, + "description": "Sample for DeleteCluster", "file": "dataproc_v1_generated_cluster_controller_delete_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_DeleteCluster_sync", "segments": [ { @@ -969,19 +1822,63 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_delete_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.diagnose_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "DiagnoseCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DiagnoseClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "diagnose_cluster" }, + "description": "Sample for DiagnoseCluster", "file": "dataproc_v1_generated_cluster_controller_diagnose_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_DiagnoseCluster_async", "segments": [ { @@ -1014,18 +1911,62 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_diagnose_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.diagnose_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "DiagnoseCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DiagnoseClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "diagnose_cluster" }, + "description": "Sample for DiagnoseCluster", "file": "dataproc_v1_generated_cluster_controller_diagnose_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_DiagnoseCluster_sync", "segments": [ { @@ -1058,19 +1999,63 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"dataproc_v1_generated_cluster_controller_diagnose_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.get_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "GetCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Cluster", + "shortName": "get_cluster" }, + "description": "Sample for GetCluster", "file": "dataproc_v1_generated_cluster_controller_get_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_GetCluster_async", "segments": [ { @@ -1103,18 +2088,62 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_get_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.get_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "GetCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Cluster", + "shortName": "get_cluster" }, + "description": "Sample for GetCluster", "file": "dataproc_v1_generated_cluster_controller_get_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_GetCluster_sync", "segments": [ { @@ -1147,19 +2176,63 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_get_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.list_clusters", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "ListClusters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListClustersRequest" + }, + { + 
"name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersAsyncPager", + "shortName": "list_clusters" }, + "description": "Sample for ListClusters", "file": "dataproc_v1_generated_cluster_controller_list_clusters_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_ListClusters_async", "segments": [ { @@ -1192,18 +2265,62 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_list_clusters_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.list_clusters", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "ListClusters" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListClustersRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersPager", + "shortName": "list_clusters" }, + "description": "Sample for ListClusters", "file": "dataproc_v1_generated_cluster_controller_list_clusters_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_ListClusters_sync", "segments": [ { @@ -1236,19 +2353,51 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_list_clusters_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.start_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "StartCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.StartClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "start_cluster" }, + "description": "Sample for StartCluster", "file": "dataproc_v1_generated_cluster_controller_start_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_StartCluster_async", "segments": [ { @@ -1281,18 +2430,50 @@ "start": 48, 
"type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_start_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.start_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "StartCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.StartClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "start_cluster" }, + "description": "Sample for StartCluster", "file": "dataproc_v1_generated_cluster_controller_start_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_StartCluster_sync", "segments": [ { @@ -1325,19 +2506,51 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_start_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.stop_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "StopCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.StopClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "stop_cluster" }, + "description": "Sample for StopCluster", "file": "dataproc_v1_generated_cluster_controller_stop_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_StopCluster_async", "segments": [ { @@ -1370,18 +2583,50 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_stop_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.stop_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "StopCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.StopClusterRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + 
"shortName": "stop_cluster" }, + "description": "Sample for StopCluster", "file": "dataproc_v1_generated_cluster_controller_stop_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_StopCluster_sync", "segments": [ { @@ -1414,19 +2659,71 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_stop_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", + "shortName": "ClusterControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.update_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "UpdateCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.dataproc_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_cluster" }, + "description": "Sample for UpdateCluster", "file": "dataproc_v1_generated_cluster_controller_update_cluster_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_UpdateCluster_async", "segments": [ { @@ -1459,18 +2756,70 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_update_cluster_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", + "shortName": "ClusterControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.update_cluster", "method": { + "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", "service": { + "fullName": "google.cloud.dataproc.v1.ClusterController", "shortName": "ClusterController" }, "shortName": "UpdateCluster" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateClusterRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "cluster_name", + "type": "str" + }, + { + "name": "cluster", + "type": "google.cloud.dataproc_v1.types.Cluster" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_cluster" }, + "description": "Sample for UpdateCluster", "file": "dataproc_v1_generated_cluster_controller_update_cluster_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_ClusterController_UpdateCluster_sync", "segments": [ { @@ -1503,19 
+2852,63 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_cluster_controller_update_cluster_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.cancel_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "CancelJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CancelJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "cancel_job" }, + "description": "Sample for CancelJob", "file": "dataproc_v1_generated_job_controller_cancel_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_CancelJob_async", "segments": [ { @@ -1548,18 +2941,62 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_cancel_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.cancel_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "CancelJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CancelJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "cancel_job" }, + "description": "Sample for CancelJob", "file": "dataproc_v1_generated_job_controller_cancel_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_CancelJob_sync", "segments": [ { @@ -1592,19 +3029,62 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_cancel_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.delete_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "DeleteJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + 
"type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job" }, + "description": "Sample for DeleteJob", "file": "dataproc_v1_generated_job_controller_delete_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_DeleteJob_async", "segments": [ { @@ -1635,18 +3115,61 @@ "end": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_delete_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.delete_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "DeleteJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job" }, + "description": "Sample for DeleteJob", "file": "dataproc_v1_generated_job_controller_delete_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_DeleteJob_sync", "segments": [ { @@ -1677,19 +3200,63 @@ "end": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_delete_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.get_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.GetJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "GetJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "get_job" }, + "description": "Sample for GetJob", "file": "dataproc_v1_generated_job_controller_get_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_GetJob_async", "segments": [ { @@ -1722,18 +3289,62 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_get_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": 
"google.cloud.dataproc_v1.JobControllerClient.get_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.GetJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "GetJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "get_job" }, + "description": "Sample for GetJob", "file": "dataproc_v1_generated_job_controller_get_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_GetJob_sync", "segments": [ { @@ -1766,19 +3377,63 @@ "start": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_get_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.list_jobs", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "ListJobs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListJobsRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" }, + "description": "Sample for ListJobs", "file": "dataproc_v1_generated_job_controller_list_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_ListJobs_async", "segments": [ { @@ -1811,18 +3466,62 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_list_jobs_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.list_jobs", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "ListJobs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListJobsRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsPager", + 
"shortName": "list_jobs" }, + "description": "Sample for ListJobs", "file": "dataproc_v1_generated_job_controller_list_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_ListJobs_sync", "segments": [ { @@ -1855,19 +3554,63 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_list_jobs_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.submit_job_as_operation", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "SubmitJobAsOperation" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.dataproc_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "submit_job_as_operation" }, + "description": "Sample for SubmitJobAsOperation", "file": "dataproc_v1_generated_job_controller_submit_job_as_operation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_SubmitJobAsOperation_async", "segments": [ { @@ -1900,18 +3643,62 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_submit_job_as_operation_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.submit_job_as_operation", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "SubmitJobAsOperation" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.dataproc_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "submit_job_as_operation" }, + "description": "Sample for SubmitJobAsOperation", "file": "dataproc_v1_generated_job_controller_submit_job_as_operation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_SubmitJobAsOperation_sync", "segments": [ { @@ -1944,19 +3731,63 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_submit_job_as_operation_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": 
"google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.submit_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "SubmitJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.dataproc_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "submit_job" }, + "description": "Sample for SubmitJob", "file": "dataproc_v1_generated_job_controller_submit_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_SubmitJob_async", "segments": [ { @@ -1989,18 +3820,62 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_submit_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.submit_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "SubmitJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" + }, + { + "name": "project_id", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.dataproc_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "submit_job" }, + "description": "Sample for SubmitJob", "file": "dataproc_v1_generated_job_controller_submit_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_SubmitJob_sync", "segments": [ { @@ -2033,19 +3908,51 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_submit_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", + "shortName": "JobControllerAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.update_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "UpdateJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + 
"shortName": "update_job" }, + "description": "Sample for UpdateJob", "file": "dataproc_v1_generated_job_controller_update_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_UpdateJob_async", "segments": [ { @@ -2078,18 +3985,50 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_update_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.JobControllerClient", + "shortName": "JobControllerClient" + }, + "fullName": "google.cloud.dataproc_v1.JobControllerClient.update_job", "method": { + "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", "service": { + "fullName": "google.cloud.dataproc.v1.JobController", "shortName": "JobController" }, "shortName": "UpdateJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.Job", + "shortName": "update_job" }, + "description": "Sample for UpdateJob", "file": "dataproc_v1_generated_job_controller_update_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_JobController_UpdateJob_sync", "segments": [ { @@ -2122,19 +4061,59 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_job_controller_update_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.create_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "CreateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "create_workflow_template" }, + "description": "Sample for CreateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_create_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async", "segments": [ { @@ -2167,18 +4146,58 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_create_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.create_workflow_template", "method": { + 
"fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "CreateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "create_workflow_template" }, + "description": "Sample for CreateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_create_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_sync", "segments": [ { @@ -2211,19 +4230,54 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_create_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.delete_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "DeleteWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_workflow_template" }, + "description": "Sample for DeleteWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async", "segments": [ { @@ -2254,18 +4308,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.delete_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "DeleteWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": 
"float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_workflow_template" }, + "description": "Sample for DeleteWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_sync", "segments": [ { @@ -2296,19 +4385,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.get_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "GetWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "get_workflow_template" }, + "description": "Sample for GetWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_get_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async", "segments": [ { @@ -2341,18 +4466,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_get_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.get_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "GetWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "get_workflow_template" }, + "description": "Sample for GetWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_get_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_sync", "segments": [ { @@ -2385,19 +4546,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"dataproc_v1_generated_workflow_template_service_get_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.instantiate_inline_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "InstantiateInlineWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "instantiate_inline_workflow_template" }, + "description": "Sample for InstantiateInlineWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async", "segments": [ { @@ -2430,18 +4631,58 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.instantiate_inline_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "InstantiateInlineWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "instantiate_inline_workflow_template" }, + "description": "Sample for InstantiateInlineWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_sync", "segments": [ { @@ -2474,19 +4715,59 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": 
"google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.instantiate_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "InstantiateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "parameters", + "type": "Mapping[str, str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "instantiate_workflow_template" }, + "description": "Sample for InstantiateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async", "segments": [ { @@ -2519,18 +4800,58 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.instantiate_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "InstantiateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "parameters", + "type": "Mapping[str, str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "instantiate_workflow_template" }, + "description": "Sample for InstantiateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_sync", "segments": [ { @@ -2563,19 +4884,55 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.list_workflow_templates", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", "service": { + 
"fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "ListWorkflowTemplates" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesAsyncPager", + "shortName": "list_workflow_templates" }, + "description": "Sample for ListWorkflowTemplates", "file": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async", "segments": [ { @@ -2608,18 +4965,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.list_workflow_templates", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "ListWorkflowTemplates" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesPager", + "shortName": "list_workflow_templates" }, + "description": "Sample for ListWorkflowTemplates", "file": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_sync", "segments": [ { @@ -2652,19 +5045,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", + "shortName": "WorkflowTemplateServiceAsyncClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.update_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "UpdateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "update_workflow_template" }, + "description": "Sample for UpdateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_update_workflow_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async", "segments": [ { @@ -2697,18 +5126,54 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_update_workflow_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", + "shortName": "WorkflowTemplateServiceClient" + }, + "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.update_workflow_template", "method": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", "service": { + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", "shortName": "WorkflowTemplateService" }, "shortName": "UpdateWorkflowTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest" + }, + { + "name": "template", + "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", + "shortName": "update_workflow_template" }, + "description": "Sample for UpdateWorkflowTemplate", "file": "dataproc_v1_generated_workflow_template_service_update_workflow_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_sync", "segments": [ { @@ -2741,7 +5206,8 @@ "start": 48, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "dataproc_v1_generated_workflow_template_service_update_workflow_template_sync.py" } ] } diff --git a/setup.py b/setup.py index 7656c623..0b606ad5 100644 --- a/setup.py +++ b/setup.py @@ -29,10 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.15.0, <2.0.0dev", "protobuf >= 3.19.0, <4.0.0dev", ] diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt deleted file mode 100644 index 3ef972c6..00000000 --- a/testing/constraints-3.6.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -google-api-core==1.31.5 -libcst==0.2.5 -proto-plus==1.15.0 -protobuf==3.19.0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 3ef972c6..70a508fd 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.31.5 +google-api-core==1.32.0 libcst==0.2.5 proto-plus==1.15.0 protobuf==3.19.0 diff --git a/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index c7786a62..d83c5f5e 100644 --- a/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -14,7 +14,13 @@ # limitations under the License. # import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -91,24 +97,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - AutoscalingPolicyServiceClient, - AutoscalingPolicyServiceAsyncClient, + (AutoscalingPolicyServiceClient, "grpc"), + (AutoscalingPolicyServiceAsyncClient, "grpc_asyncio"), ], ) -def test_autoscaling_policy_service_client_from_service_account_info(client_class): +def test_autoscaling_policy_service_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -137,27 +145,33 @@ def test_autoscaling_policy_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - AutoscalingPolicyServiceClient, - AutoscalingPolicyServiceAsyncClient, + (AutoscalingPolicyServiceClient, "grpc"), + (AutoscalingPolicyServiceAsyncClient, "grpc_asyncio"), ], ) -def test_autoscaling_policy_service_client_from_service_account_file(client_class): +def test_autoscaling_policy_service_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 
"dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") def test_autoscaling_policy_service_client_get_transport_class(): @@ -228,6 +242,7 @@ def test_autoscaling_policy_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -245,6 +260,7 @@ def test_autoscaling_policy_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -262,6 +278,7 @@ def test_autoscaling_policy_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -291,6 +308,25 @@ def test_autoscaling_policy_service_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -368,6 +404,7 @@ def test_autoscaling_policy_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -402,6 +439,7 @@ def test_autoscaling_policy_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -424,6 +462,7 @@ def test_autoscaling_policy_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -545,6 +584,7 @@ def test_autoscaling_policy_service_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -583,6 +623,7 @@ def test_autoscaling_policy_service_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -603,6 +644,7 @@ def test_autoscaling_policy_service_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -641,6 +683,7 @@ def test_autoscaling_policy_service_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -786,7 +829,7 @@ def test_create_autoscaling_policy_field_headers(): # a field header. Set these to a non-empty value. request = autoscaling_policies.CreateAutoscalingPolicyRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -804,7 +847,7 @@ def test_create_autoscaling_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -818,7 +861,7 @@ async def test_create_autoscaling_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = autoscaling_policies.CreateAutoscalingPolicyRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -838,7 +881,7 @@ async def test_create_autoscaling_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1052,7 +1095,7 @@ def test_update_autoscaling_policy_field_headers(): # a field header. Set these to a non-empty value. request = autoscaling_policies.UpdateAutoscalingPolicyRequest() - request.policy.name = "policy.name/value" + request.policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1070,7 +1113,7 @@ def test_update_autoscaling_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "policy.name=policy.name/value", + "policy.name=name_value", ) in kw["metadata"] @@ -1084,7 +1127,7 @@ async def test_update_autoscaling_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = autoscaling_policies.UpdateAutoscalingPolicyRequest() - request.policy.name = "policy.name/value" + request.policy.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1104,7 +1147,7 @@ async def test_update_autoscaling_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "policy.name=policy.name/value", + "policy.name=name_value", ) in kw["metadata"] @@ -1308,7 +1351,7 @@ def test_get_autoscaling_policy_field_headers(): # a field header. 
Set these to a non-empty value. request = autoscaling_policies.GetAutoscalingPolicyRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1326,7 +1369,7 @@ def test_get_autoscaling_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1340,7 +1383,7 @@ async def test_get_autoscaling_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = autoscaling_policies.GetAutoscalingPolicyRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1360,7 +1403,7 @@ async def test_get_autoscaling_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1555,7 +1598,7 @@ def test_list_autoscaling_policies_field_headers(): # a field header. Set these to a non-empty value. request = autoscaling_policies.ListAutoscalingPoliciesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1573,7 +1616,7 @@ def test_list_autoscaling_policies_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1587,7 +1630,7 @@ async def test_list_autoscaling_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = autoscaling_policies.ListAutoscalingPoliciesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1607,7 +1650,7 @@ async def test_list_autoscaling_policies_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1744,7 +1787,7 @@ def test_list_autoscaling_policies_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all( isinstance(i, autoscaling_policies.AutoscalingPolicy) for i in results @@ -1839,7 +1882,7 @@ async def test_list_autoscaling_policies_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1889,7 +1932,9 @@ async def test_list_autoscaling_policies_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_autoscaling_policies(request={})).pages: + async for page_ in ( + await client.list_autoscaling_policies(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1992,7 +2037,7 @@ def test_delete_autoscaling_policy_field_headers(): # a field header. Set these to a non-empty value. request = autoscaling_policies.DeleteAutoscalingPolicyRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2010,7 +2055,7 @@ def test_delete_autoscaling_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2024,7 +2069,7 @@ async def test_delete_autoscaling_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = autoscaling_policies.DeleteAutoscalingPolicyRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2042,7 +2087,7 @@ async def test_delete_autoscaling_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2221,6 +2266,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = AutoscalingPolicyServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = AutoscalingPolicyServiceClient( @@ -2267,6 +2325,14 @@ def test_autoscaling_policy_service_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_autoscaling_policy_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2332,6 +2398,28 @@ def test_autoscaling_policy_service_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_autoscaling_policy_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2416,24 +2504,40 @@ def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls( ) -def test_autoscaling_policy_service_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_autoscaling_policy_service_host_no_port(transport_name): client = AutoscalingPolicyServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") -def test_autoscaling_policy_service_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def 
test_autoscaling_policy_service_host_with_port(transport_name): client = AutoscalingPolicyServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_autoscaling_policy_service_grpc_transport_channel(): @@ -2796,4 +2900,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/tests/unit/gapic/dataproc_v1/test_batch_controller.py index 93a34e14..7a966d50 100644 --- a/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -14,7 +14,13 @@ # limitations under the License. # import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -95,24 +101,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - BatchControllerClient, - BatchControllerAsyncClient, + (BatchControllerClient, "grpc"), + (BatchControllerAsyncClient, "grpc_asyncio"), ], ) -def test_batch_controller_client_from_service_account_info(client_class): +def test_batch_controller_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -141,27 +149,33 @@ def test_batch_controller_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - BatchControllerClient, - BatchControllerAsyncClient, + (BatchControllerClient, "grpc"), + (BatchControllerAsyncClient, "grpc_asyncio"), ], ) -def test_batch_controller_client_from_service_account_file(client_class): +def test_batch_controller_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == 
("dataproc.googleapis.com:443") def test_batch_controller_client_get_transport_class(): @@ -224,6 +238,7 @@ def test_batch_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -241,6 +256,7 @@ def test_batch_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -258,6 +274,7 @@ def test_batch_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -287,6 +304,25 @@ def test_batch_controller_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -364,6 +400,7 @@ def test_batch_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -398,6 +435,7 @@ def test_batch_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -420,6 +458,7 @@ def test_batch_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -534,6 +573,7 @@ def test_batch_controller_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -572,6 +612,7 @@ def test_batch_controller_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -592,6 +633,7 @@ def test_batch_controller_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -630,6 +672,7 @@ def test_batch_controller_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -753,7 +796,7 @@ def test_create_batch_field_headers(): # a field header. Set these to a non-empty value. 
request = batches.CreateBatchRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_batch), "__call__") as call: @@ -769,7 +812,7 @@ def test_create_batch_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -783,7 +826,7 @@ async def test_create_batch_field_headers_async(): # a field header. Set these to a non-empty value. request = batches.CreateBatchRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_batch), "__call__") as call: @@ -801,7 +844,7 @@ async def test_create_batch_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1028,7 +1071,7 @@ def test_get_batch_field_headers(): # a field header. Set these to a non-empty value. request = batches.GetBatchRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_batch), "__call__") as call: @@ -1044,7 +1087,7 @@ def test_get_batch_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1058,7 +1101,7 @@ async def test_get_batch_field_headers_async(): # a field header. Set these to a non-empty value. request = batches.GetBatchRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_batch), "__call__") as call: @@ -1074,7 +1117,7 @@ async def test_get_batch_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1256,7 +1299,7 @@ def test_list_batches_field_headers(): # a field header. Set these to a non-empty value. request = batches.ListBatchesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_batches), "__call__") as call: @@ -1272,7 +1315,7 @@ def test_list_batches_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1286,7 +1329,7 @@ async def test_list_batches_field_headers_async(): # a field header. Set these to a non-empty value. request = batches.ListBatchesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_batches), "__call__") as call: @@ -1304,7 +1347,7 @@ async def test_list_batches_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1435,7 +1478,7 @@ def test_list_batches_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, batches.Batch) for i in results) @@ -1524,7 +1567,7 @@ async def test_list_batches_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1570,7 +1613,9 @@ async def test_list_batches_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_batches(request={})).pages: + async for page_ in ( + await client.list_batches(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1666,7 +1711,7 @@ def test_delete_batch_field_headers(): # a field header. Set these to a non-empty value. request = batches.DeleteBatchRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: @@ -1682,7 +1727,7 @@ def test_delete_batch_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1696,7 +1741,7 @@ async def test_delete_batch_field_headers_async(): # a field header. Set these to a non-empty value. request = batches.DeleteBatchRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: @@ -1712,7 +1757,7 @@ async def test_delete_batch_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1887,6 +1932,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = BatchControllerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = BatchControllerClient( @@ -1937,6 +1995,14 @@ def test_batch_controller_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_batch_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2002,6 +2068,28 @@ def test_batch_controller_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.BatchControllerGrpcTransport, + transports.BatchControllerGrpcAsyncIOTransport, + ], +) +def test_batch_controller_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2082,24 +2170,40 @@ def test_batch_controller_grpc_transport_client_cert_source_for_mtls(transport_c ) -def test_batch_controller_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_batch_controller_host_no_port(transport_name): client = BatchControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") -def test_batch_controller_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_batch_controller_host_with_port(transport_name): client = BatchControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_batch_controller_grpc_transport_channel(): @@ -2488,4 +2592,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 37caa114..6898bbb8 100644 --- a/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -14,7 +14,13 @@ # limitations under the License. 
# import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -98,24 +104,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - ClusterControllerClient, - ClusterControllerAsyncClient, + (ClusterControllerClient, "grpc"), + (ClusterControllerAsyncClient, "grpc_asyncio"), ], ) -def test_cluster_controller_client_from_service_account_info(client_class): +def test_cluster_controller_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -144,27 +152,33 @@ def test_cluster_controller_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - ClusterControllerClient, - ClusterControllerAsyncClient, + (ClusterControllerClient, "grpc"), + (ClusterControllerAsyncClient, "grpc_asyncio"), ], ) -def test_cluster_controller_client_from_service_account_file(client_class): +def test_cluster_controller_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") def test_cluster_controller_client_get_transport_class(): @@ -227,6 +241,7 @@ def test_cluster_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -244,6 +259,7 @@ def test_cluster_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -261,6 +277,7 @@ def test_cluster_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -290,6 +307,25 @@ def 
test_cluster_controller_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -367,6 +403,7 @@ def test_cluster_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -401,6 +438,7 @@ def test_cluster_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -423,6 +461,7 @@ def test_cluster_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -537,6 +576,7 @@ def test_cluster_controller_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -575,6 +615,7 @@ def test_cluster_controller_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -595,6 +636,7 @@ def test_cluster_controller_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -633,6 +675,7 @@ def test_cluster_controller_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -747,6 +790,69 @@ async def test_create_cluster_async_from_dict(): await test_create_cluster_async(request_type=dict) +def test_create_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.CreateClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.CreateClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + def test_create_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -932,6 +1038,71 @@ async def test_update_cluster_async_from_dict(): await test_update_cluster_async(request_type=dict) + +def test_update_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.UpdateClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.UpdateClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + def test_update_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1137,6 +1308,71 @@ async def test_stop_cluster_async_from_dict(): await test_stop_cluster_async(request_type=dict) + +def test_stop_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.StopClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.stop_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_stop_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.StopClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.stop_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1220,6 +1456,71 @@ async def test_start_cluster_async_from_dict(): await test_start_cluster_async(request_type=dict) + +def test_start_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.StartClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.start_cluster(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.StartClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.start_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1303,6 +1604,71 @@ async def test_delete_cluster_async_from_dict(): await test_delete_cluster_async(request_type=dict) + +def test_delete_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.DeleteClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.DeleteClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + def test_delete_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1502,6 +1868,69 @@ async def test_get_cluster_async_from_dict(): await test_get_cluster_async(request_type=dict) + +def test_get_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.GetClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = clusters.Cluster() + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.GetClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.Cluster()) + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + def test_get_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1691,6 +2120,69 @@ async def test_list_clusters_async_from_dict(): await test_list_clusters_async(request_type=dict) + +def test_list_clusters_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.ListClustersRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = clusters.ListClustersResponse() + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_clusters_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.ListClustersRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clusters.ListClustersResponse() + ) + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + def test_list_clusters_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1831,11 +2323,19 @@ def test_list_clusters_pager(transport_name: str = "grpc"): ) metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", ""), + ("region", ""), + ) + ), + ) pager = client.list_clusters(request={}) assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, clusters.Cluster) for i in results) @@ -1924,7 +2424,7 @@ async def test_list_clusters_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1970,7 +2470,9 @@ async def test_list_clusters_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_clusters(request={})).pages: + async for page_ in ( + await client.list_clusters(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2059,6 +2561,71 @@ async def test_diagnose_cluster_async_from_dict(): await test_diagnose_cluster_async(request_type=dict) + +def test_diagnose_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.DiagnoseClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.diagnose_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_diagnose_cluster_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.DiagnoseClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.diagnose_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value", + ) in kw["metadata"] + + def test_diagnose_cluster_flattened(): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2252,6 +2819,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = ClusterControllerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default.
client = ClusterControllerClient( @@ -2306,6 +2886,14 @@ def test_cluster_controller_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_cluster_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2371,6 +2959,28 @@ def test_cluster_controller_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +def test_cluster_controller_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2451,24 +3061,40 @@ def test_cluster_controller_grpc_transport_client_cert_source_for_mtls(transport ) -def test_cluster_controller_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cluster_controller_host_no_port(transport_name): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") -def test_cluster_controller_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cluster_controller_host_with_port(transport_name): client = ClusterControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_cluster_controller_grpc_transport_channel(): @@ -2860,4 +3486,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/dataproc_v1/test_job_controller.py b/tests/unit/gapic/dataproc_v1/test_job_controller.py index bbd57687..b7919cd3 100644 --- a/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -14,7 +14,13 @@ # limitations under the License. 
# import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -92,24 +98,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - JobControllerClient, - JobControllerAsyncClient, + (JobControllerClient, "grpc"), + (JobControllerAsyncClient, "grpc_asyncio"), ], ) -def test_job_controller_client_from_service_account_info(client_class): +def test_job_controller_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -138,27 +144,31 @@ def test_job_controller_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - JobControllerClient, - JobControllerAsyncClient, + (JobControllerClient, "grpc"), + (JobControllerAsyncClient, "grpc_asyncio"), ], ) -def test_job_controller_client_from_service_account_file(client_class): +def test_job_controller_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") def test_job_controller_client_get_transport_class(): @@ -221,6 +231,7 @@ def test_job_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -238,6 +249,7 @@ def test_job_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -255,6 +267,7 @@ def test_job_controller_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -284,6 +297,25 @@ def test_job_controller_client_client_options( quota_project_id="octopus", 
client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -351,6 +383,7 @@ def test_job_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -385,6 +418,7 @@ def test_job_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -407,6 +441,7 @@ def test_job_controller_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -521,6 +556,7 @@ def test_job_controller_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -559,6 +595,7 @@ def test_job_controller_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -579,6 +616,7 @@ def test_job_controller_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -617,6 +655,7 @@ def test_job_controller_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -750,6 +789,67 @@ async def test_submit_job_async_from_dict(): await test_submit_job_async(request_type=dict) +def test_submit_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + call.return_value = jobs.Job() + client.submit_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
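
# The x-goog-request-params value asserted just below is built from
# (field, value) pairs by the routing-header helper that the pager tests in
# this file also use; a minimal construction sketch, assuming google.api_core's
# public helper:
from google.api_core import gapic_v1

key, value = gapic_v1.routing_header.to_grpc_metadata(
    (("project_id", "project_id_value"), ("region", "region_value"))
)
assert key == "x-goog-request-params"
assert value == "project_id=project_id_value&region=region_value"
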
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.submit_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + def test_submit_job_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -939,6 +1039,73 @@ async def test_submit_job_as_operation_async_from_dict(): await test_submit_job_as_operation_async(request_type=dict) +def test_submit_job_as_operation_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.submit_job_as_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_job_as_operation_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.submit_job_as_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + def test_submit_job_as_operation_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1147,6 +1314,69 @@ async def test_get_job_async_from_dict(): await test_get_job_async(request_type=dict) +def test_get_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.GetJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + call.return_value = jobs.Job() + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.GetJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + def test_get_job_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1336,6 +1566,69 @@ async def test_list_jobs_async_from_dict(): await test_list_jobs_async(request_type=dict) +def test_list_jobs_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.ListJobsRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = jobs.ListJobsResponse() + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
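
# FakeUnaryUnaryCall, used as the stub return value throughout the async tests
# in this file, is assumed to wrap a plain response so that awaiting the mocked
# call yields it; a small usage sketch under that assumption:
import asyncio

from google.api_core import grpc_helpers_async
from google.cloud.dataproc_v1.types import jobs


async def _demo():
    fake_call = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job())
    response = await fake_call
    assert isinstance(response, jobs.Job)


asyncio.run(_demo())
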
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_jobs_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.ListJobsRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + jobs.ListJobsResponse() + ) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + def test_list_jobs_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1476,11 +1769,19 @@ def test_list_jobs_pager(transport_name: str = "grpc"): ) metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", ""), + ("region", ""), + ) + ), + ) pager = client.list_jobs(request={}) assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, jobs.Job) for i in results) @@ -1569,7 +1870,7 @@ async def test_list_jobs_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1615,7 +1916,9 @@ async def test_list_jobs_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_jobs(request={})).pages: + async for page_ in ( + await client.list_jobs(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1723,6 +2026,69 @@ async def test_update_job_async_from_dict(): await test_update_job_async(request_type=dict) +def test_update_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.UpdateJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = jobs.Job() + client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
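
# The pager changes above swap a list comprehension for list(pager) and mark
# the async loops with "# pragma: no branch"; a compact sketch of both
# draining patterns (pager objects as produced by the list_jobs calls in the
# tests):
def drain_sync(pager):
    # Synchronous pagers are plain iterables; list() walks every page.
    return list(pager)


async def drain_async(async_pager):
    # Async pagers yield items across pages via the async-iterator protocol.
    return [job async for job in async_pager]
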
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.UpdateJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -1825,6 +2191,69 @@ async def test_cancel_job_async_from_dict(): await test_cancel_job_async(request_type=dict) +def test_cancel_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.CancelJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = jobs.Job() + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.CancelJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + def test_cancel_job_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2006,6 +2435,69 @@ async def test_delete_job_async_from_dict(): await test_delete_job_async(request_type=dict) +def test_delete_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.DeleteJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + call.return_value = None + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.DeleteJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + def test_delete_job_flattened(): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2197,6 +2689,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = JobControllerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default.
client = JobControllerClient( @@ -2250,6 +2755,14 @@ def test_job_controller_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_job_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2315,6 +2828,28 @@ def test_job_controller_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobControllerGrpcTransport, + transports.JobControllerGrpcAsyncIOTransport, + ], +) +def test_job_controller_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2395,24 +2930,40 @@ def test_job_controller_grpc_transport_client_cert_source_for_mtls(transport_cla ) -def test_job_controller_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_job_controller_host_no_port(transport_name): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") -def test_job_controller_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_job_controller_host_with_port(transport_name): client = JobControllerClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_job_controller_grpc_transport_channel(): @@ -2773,4 +3324,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 6b297b0f..eb71a5f7 100644 --- a/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -14,7 +14,13 @@ # limitations under the License. 
# import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -100,24 +106,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - WorkflowTemplateServiceClient, - WorkflowTemplateServiceAsyncClient, + (WorkflowTemplateServiceClient, "grpc"), + (WorkflowTemplateServiceAsyncClient, "grpc_asyncio"), ], ) -def test_workflow_template_service_client_from_service_account_info(client_class): +def test_workflow_template_service_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") @pytest.mark.parametrize( @@ -146,27 +154,33 @@ def test_workflow_template_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - WorkflowTemplateServiceClient, - WorkflowTemplateServiceAsyncClient, + (WorkflowTemplateServiceClient, "grpc"), + (WorkflowTemplateServiceAsyncClient, "grpc_asyncio"), ], ) -def test_workflow_template_service_client_from_service_account_file(client_class): +def test_workflow_template_service_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") def test_workflow_template_service_client_get_transport_class(): @@ -233,6 +247,7 @@ def test_workflow_template_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -250,6 +265,7 @@ def test_workflow_template_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -267,6 +283,7 @@ def test_workflow_template_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is 
not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -296,6 +313,25 @@ def test_workflow_template_service_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -373,6 +409,7 @@ def test_workflow_template_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -407,6 +444,7 @@ def test_workflow_template_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -429,6 +467,7 @@ def test_workflow_template_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -549,6 +588,7 @@ def test_workflow_template_service_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -587,6 +627,7 @@ def test_workflow_template_service_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -607,6 +648,7 @@ def test_workflow_template_service_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -645,6 +687,7 @@ def test_workflow_template_service_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -789,7 +832,7 @@ def test_create_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.CreateWorkflowTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -807,7 +850,7 @@ def test_create_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -821,7 +864,7 @@ async def test_create_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.CreateWorkflowTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
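
# A hedged usage sketch for the api_audience option the client-options blocks
# above assert on; the audience URL here is illustrative only, and anonymous
# credentials stand in for real ones:
from google.api_core import client_options as client_options_lib
from google.auth import credentials as ga_credentials
from google.cloud import dataproc_v1

client = dataproc_v1.WorkflowTemplateServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
    client_options=client_options_lib.ClientOptions(
        api_audience="https://dataproc.googleapis.com"
    ),
)
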
with mock.patch.object( @@ -841,7 +884,7 @@ async def test_create_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1054,7 +1097,7 @@ def test_get_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.GetWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1072,7 +1115,7 @@ def test_get_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1086,7 +1129,7 @@ async def test_get_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.GetWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1106,7 +1149,7 @@ async def test_get_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1295,7 +1338,7 @@ def test_instantiate_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1313,7 +1356,7 @@ def test_instantiate_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1327,7 +1370,7 @@ async def test_instantiate_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1347,7 +1390,7 @@ async def test_instantiate_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1546,7 +1589,7 @@ def test_instantiate_inline_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1564,7 +1607,7 @@ def test_instantiate_inline_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1578,7 +1621,7 @@ async def test_instantiate_inline_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1598,7 +1641,7 @@ async def test_instantiate_inline_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1811,7 +1854,7 @@ def test_update_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.UpdateWorkflowTemplateRequest() - request.template.name = "template.name/value" + request.template.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1829,7 +1872,7 @@ def test_update_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "template.name=template.name/value", + "template.name=name_value", ) in kw["metadata"] @@ -1843,7 +1886,7 @@ async def test_update_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.UpdateWorkflowTemplateRequest() - request.template.name = "template.name/value" + request.template.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1863,7 +1906,7 @@ async def test_update_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "template.name=template.name/value", + "template.name=name_value", ) in kw["metadata"] @@ -2058,7 +2101,7 @@ def test_list_workflow_templates_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.ListWorkflowTemplatesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2076,7 +2119,7 @@ def test_list_workflow_templates_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2090,7 +2133,7 @@ async def test_list_workflow_templates_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.ListWorkflowTemplatesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2110,7 +2153,7 @@ async def test_list_workflow_templates_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2247,7 +2290,7 @@ def test_list_workflow_templates_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, workflow_templates.WorkflowTemplate) for i in results) @@ -2340,7 +2383,7 @@ async def test_list_workflow_templates_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2390,7 +2433,9 @@ async def test_list_workflow_templates_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_workflow_templates(request={})).pages: + async for page_ in ( + await client.list_workflow_templates(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2493,7 +2538,7 @@ def test_delete_workflow_template_field_headers(): # a field header. Set these to a non-empty value. request = workflow_templates.DeleteWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2511,7 +2556,7 @@ def test_delete_workflow_template_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2525,7 +2570,7 @@ async def test_delete_workflow_template_field_headers_async(): # a field header. Set these to a non-empty value. request = workflow_templates.DeleteWorkflowTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2543,7 +2588,7 @@ async def test_delete_workflow_template_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2722,6 +2767,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = WorkflowTemplateServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
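
# The switch from "parent/value" to "parent_value" (and "name/value" to
# "name_value") in the header tests above likely tracks the generator's move
# to percent-encoded routing parameters, where a slash in a value would
# surface as %2F; a standard-library illustration of that encoding:
from urllib.parse import quote

assert quote("parent/value", safe="") == "parent%2Fvalue"
assert quote("parent_value", safe="") == "parent_value"
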
client = WorkflowTemplateServiceClient( @@ -2775,6 +2833,14 @@ def test_workflow_template_service_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_workflow_template_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2840,6 +2906,28 @@ def test_workflow_template_service_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +def test_workflow_template_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2924,24 +3012,40 @@ def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( ) -def test_workflow_template_service_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_workflow_template_service_host_no_port(transport_name): client = WorkflowTemplateServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:443" + assert client.transport._host == ("dataproc.googleapis.com:443") -def test_workflow_template_service_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_workflow_template_service_host_with_port(transport_name): client = WorkflowTemplateServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "dataproc.googleapis.com:8000" + assert client.transport._host == ("dataproc.googleapis.com:8000") def test_workflow_template_service_grpc_transport_channel(): @@ -3364,4 +3468,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, )
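
# A minimal sketch of the default-port rule the host tests above pin down
# (normalize_host is a hypothetical stand-in for logic that lives in the
# transport layer): an api_endpoint without an explicit port is normalized to
# HTTPS port 443, while an explicit port is kept as-is.
def normalize_host(host: str) -> str:
    return host if ":" in host else host + ":443"


assert normalize_host("dataproc.googleapis.com") == "dataproc.googleapis.com:443"
assert normalize_host("dataproc.googleapis.com:8000") == "dataproc.googleapis.com:8000"
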