diff --git a/dataproc/google/cloud/dataproc.py b/dataproc/google/cloud/dataproc.py index 25b767956a0c..aa7757bd5340 100644 --- a/dataproc/google/cloud/dataproc.py +++ b/dataproc/google/cloud/dataproc.py @@ -17,6 +17,7 @@ from __future__ import absolute_import +from google.cloud.dataproc_v1 import AutoscalingPolicyServiceClient from google.cloud.dataproc_v1 import ClusterControllerClient from google.cloud.dataproc_v1 import JobControllerClient from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient @@ -30,4 +31,5 @@ "ClusterControllerClient", "JobControllerClient", "WorkflowTemplateServiceClient", + "AutoscalingPolicyServiceClient", ) diff --git a/dataproc/google/cloud/dataproc_v1/__init__.py b/dataproc/google/cloud/dataproc_v1/__init__.py index 395e618f20f9..267bdb29839b 100644 --- a/dataproc/google/cloud/dataproc_v1/__init__.py +++ b/dataproc/google/cloud/dataproc_v1/__init__.py @@ -20,6 +20,7 @@ import warnings from google.cloud.dataproc_v1 import types +from google.cloud.dataproc_v1.gapic import autoscaling_policy_service_client from google.cloud.dataproc_v1.gapic import cluster_controller_client from google.cloud.dataproc_v1.gapic import enums from google.cloud.dataproc_v1.gapic import job_controller_client @@ -52,10 +53,18 @@ class WorkflowTemplateServiceClient( enums = enums +class AutoscalingPolicyServiceClient( + autoscaling_policy_service_client.AutoscalingPolicyServiceClient +): + __doc__ = autoscaling_policy_service_client.AutoscalingPolicyServiceClient.__doc__ + enums = enums + + __all__ = ( "enums", "types", "ClusterControllerClient", "JobControllerClient", "WorkflowTemplateServiceClient", + "AutoscalingPolicyServiceClient", ) diff --git a/dataproc/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py b/dataproc/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py new file mode 100644 index 000000000000..603aa14f063c --- /dev/null +++ b/dataproc/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py 
@@ -0,0 +1,648 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Accesses the google.cloud.dataproc.v1 AutoscalingPolicyService API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template +import grpc + +from google.cloud.dataproc_v1.gapic import autoscaling_policy_service_client_config +from google.cloud.dataproc_v1.gapic import enums +from google.cloud.dataproc_v1.gapic.transports import ( + autoscaling_policy_service_grpc_transport, +) +from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2 +from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2_grpc +from google.cloud.dataproc_v1.proto import clusters_pb2 +from google.cloud.dataproc_v1.proto import clusters_pb2_grpc +from google.cloud.dataproc_v1.proto import jobs_pb2 +from google.cloud.dataproc_v1.proto import jobs_pb2_grpc +from google.cloud.dataproc_v1.proto import operations_pb2 as proto_operations_pb2 +from google.cloud.dataproc_v1.proto import workflow_templates_pb2 +from google.cloud.dataproc_v1.proto import workflow_templates_pb2_grpc +from 
google.longrunning import operations_pb2 as longrunning_operations_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version + + +class AutoscalingPolicyServiceClient(object): + """ + The API interface for managing autoscaling policies in the + Dataproc API. + """ + + SERVICE_ADDRESS = "dataproc.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.dataproc.v1.AutoscalingPolicyService" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def autoscaling_policy_path(cls, project, region, autoscaling_policy): + """Return a fully-qualified autoscaling_policy string.""" + return google.api_core.path_template.expand( + "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}", + project=project, + region=region, + autoscaling_policy=autoscaling_policy, + ) + + @classmethod + def region_path(cls, project, region): + """Return a fully-qualified region string.""" + return google.api_core.path_template.expand( + "projects/{project}/regions/{region}", project=project, region=region + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.AutoscalingPolicyServiceGrpcTransport, + Callable[[~.Credentials, type], ~.AutoscalingPolicyServiceGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = autoscaling_policy_service_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=autoscaling_policy_service_grpc_transport.AutoscalingPolicyServiceGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." 
+ ) + self.transport = transport + else: + self.transport = autoscaling_policy_service_grpc_transport.AutoscalingPolicyServiceGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def create_autoscaling_policy( + self, + parent, + policy, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates new autoscaling policy. + + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> parent = client.region_path('[PROJECT]', '[REGION]') + >>> + >>> # TODO: Initialize `policy`: + >>> policy = {} + >>> + >>> response = client.create_autoscaling_policy(parent, policy) + + Args: + parent (str): Required. The "resource name" of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. 
+ + - For ``projects.regions.autoscalingPolicies.create``, the resource + name of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.create``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` + policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): The autoscaling policy to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "create_autoscaling_policy" not in self._inner_api_calls: + self._inner_api_calls[ + "create_autoscaling_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_autoscaling_policy, + default_retry=self._method_configs["CreateAutoscalingPolicy"].retry, + default_timeout=self._method_configs["CreateAutoscalingPolicy"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest( + parent=parent, policy=policy + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["create_autoscaling_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_autoscaling_policy( + self, + policy, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates (replaces) autoscaling policy. + + Disabled check for update\_mask, because all updates will be full + replacements. + + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> # TODO: Initialize `policy`: + >>> policy = {} + >>> + >>> response = client.update_autoscaling_policy(policy) + + Args: + policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): Required. The updated autoscaling policy. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "update_autoscaling_policy" not in self._inner_api_calls: + self._inner_api_calls[ + "update_autoscaling_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_autoscaling_policy, + default_retry=self._method_configs["UpdateAutoscalingPolicy"].retry, + default_timeout=self._method_configs["UpdateAutoscalingPolicy"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(policy=policy) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("policy.name", policy.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["update_autoscaling_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_autoscaling_policy( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Retrieves autoscaling policy. 
+ + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> name = client.autoscaling_policy_path('[PROJECT]', '[REGION]', '[AUTOSCALING_POLICY]') + >>> + >>> response = client.get_autoscaling_policy(name) + + Args: + name (str): Required. The "resource name" of the autoscaling policy, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.get``, the resource name + of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.get``, the resource name + of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "get_autoscaling_policy" not in self._inner_api_calls: + self._inner_api_calls[ + "get_autoscaling_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_autoscaling_policy, + default_retry=self._method_configs["GetAutoscalingPolicy"].retry, + default_timeout=self._method_configs["GetAutoscalingPolicy"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_autoscaling_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_autoscaling_policies( + self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists autoscaling policies in the project. + + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> parent = client.region_path('[PROJECT]', '[REGION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_autoscaling_policies(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_autoscaling_policies(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The "resource name" of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. 
+ + - For ``projects.regions.autoscalingPolicies.list``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.list``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "list_autoscaling_policies" not in self._inner_api_calls: + self._inner_api_calls[ + "list_autoscaling_policies" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_autoscaling_policies, + default_retry=self._method_configs["ListAutoscalingPolicies"].retry, + default_timeout=self._method_configs["ListAutoscalingPolicies"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.ListAutoscalingPoliciesRequest( + parent=parent, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_autoscaling_policies"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="policies", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def delete_autoscaling_policy( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes an autoscaling policy. It is an error to delete an autoscaling + policy that is in use by one or more clusters. + + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> name = client.autoscaling_policy_path('[PROJECT]', '[REGION]', '[AUTOSCALING_POLICY]') + >>> + >>> client.delete_autoscaling_policy(name) + + Args: + name (str): Required. The "resource name" of the autoscaling policy, as described in + https://cloud.google.com/apis/design/resource\_names. 
+ + - For ``projects.regions.autoscalingPolicies.delete``, the resource + name of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.delete``, the resource + name of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "delete_autoscaling_policy" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_autoscaling_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_autoscaling_policy, + default_retry=self._method_configs["DeleteAutoscalingPolicy"].retry, + default_timeout=self._method_configs["DeleteAutoscalingPolicy"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.DeleteAutoscalingPolicyRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + self._inner_api_calls["delete_autoscaling_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/dataproc/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py b/dataproc/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py new file mode 100644 index 000000000000..61c50a6cacfd --- /dev/null +++ b/dataproc/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py @@ -0,0 +1,48 @@ +config = { + "interfaces": { + "google.cloud.dataproc.v1.AutoscalingPolicyService": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "CreateAutoscalingPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "UpdateAutoscalingPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", 
+ }, + "GetAutoscalingPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListAutoscalingPolicies": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteAutoscalingPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/dataproc/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py b/dataproc/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py new file mode 100644 index 000000000000..7e815bbab312 --- /dev/null +++ b/dataproc/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers + +from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2_grpc + + +class AutoscalingPolicyServiceGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.dataproc.v1 AutoscalingPolicyService API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. 
+ _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="dataproc.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + "autoscaling_policy_service_stub": autoscaling_policies_pb2_grpc.AutoscalingPolicyServiceStub( + channel + ) + } + + @classmethod + def create_channel( + cls, address="dataproc.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def create_autoscaling_policy(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.create_autoscaling_policy`. + + Creates new autoscaling policy. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].CreateAutoscalingPolicy + + @property + def update_autoscaling_policy(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.update_autoscaling_policy`. + + Updates (replaces) autoscaling policy. + + Disabled check for update\_mask, because all updates will be full + replacements. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].UpdateAutoscalingPolicy + + @property + def get_autoscaling_policy(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.get_autoscaling_policy`. + + Retrieves autoscaling policy. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].GetAutoscalingPolicy + + @property + def list_autoscaling_policies(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.list_autoscaling_policies`. + + Lists autoscaling policies in the project. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].ListAutoscalingPolicies + + @property + def delete_autoscaling_policy(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.delete_autoscaling_policy`. + + Deletes an autoscaling policy. It is an error to delete an autoscaling + policy that is in use by one or more clusters. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].DeleteAutoscalingPolicy diff --git a/dataproc/google/cloud/dataproc_v1/types.py b/dataproc/google/cloud/dataproc_v1/types.py index 600233b2f92e..ea3e3add2426 100644 --- a/dataproc/google/cloud/dataproc_v1/types.py +++ b/dataproc/google/cloud/dataproc_v1/types.py @@ -20,6 +20,7 @@ from google.api_core.protobuf_helpers import get_messages +from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2 from google.cloud.dataproc_v1.proto import clusters_pb2 from google.cloud.dataproc_v1.proto import jobs_pb2 from google.cloud.dataproc_v1.proto import operations_pb2 as proto_operations_pb2 @@ -43,7 +44,13 @@ status_pb2, ] -_local_modules = [clusters_pb2, jobs_pb2, proto_operations_pb2, workflow_templates_pb2] +_local_modules = [ + autoscaling_policies_pb2, + clusters_pb2, + jobs_pb2, + proto_operations_pb2, + workflow_templates_pb2, +] names = [] diff --git a/dataproc/synth.metadata b/dataproc/synth.metadata index 314ca5cec0e4..4562d07f181e 100644 --- a/dataproc/synth.metadata +++ b/dataproc/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2020-01-30T13:21:23.253293Z", + "updateTime": "2020-02-01T13:30:14.634908Z", "sources": [ { "generator": { @@ -12,9 +12,9 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": 
"c1246a29e22b0f98e800a536b5b0da2d933a55f2", - "internalRef": "292310790", - "log": "c1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 
291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing 
opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 
289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. 
Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config 
necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n5ef42bcd363ba0440f0ee65b3c80b499e9067ede\nMigrate Recommender v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288713066\n\n94f986afd365b7d7e132315ddcd43d7af0e652fb\nMigrate Container Analysis v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288708382\n\n7a751a279184970d3b6ba90e4dd4d22a382a0747\nRemove Container Analysis v1alpha1 (nobody publishes it).\n\nCommitter: 
@lukesneeringer\nPiperOrigin-RevId: 288707473\n\n3c0d9c71242e70474b2b640e15bb0a435fd06ff0\nRemove specious annotation from BigQuery Data Transfer before\nanyone accidentally does anything that uses it.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288701604\n\n1af307a4764bd415ef942ac5187fa1def043006f\nMigrate BigQuery Connection to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288698681\n\n" + "sha": "b5cbe4a4ba64ab19e6627573ff52057a1657773d", + "internalRef": "292647187", + "log": "b5cbe4a4ba64ab19e6627573ff52057a1657773d\nSecurityCenter v1p1beta1: move file-level option on top to workaround protobuf.js bug.\n\nPiperOrigin-RevId: 292647187\n\nb224b317bf20c6a4fbc5030b4a969c3147f27ad3\nAdds API definitions for bigqueryreservation v1beta1.\n\nPiperOrigin-RevId: 292634722\n\nc1468702f9b17e20dd59007c0804a089b83197d2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 292626173\n\nffdfa4f55ab2f0afc11d0eb68f125ccbd5e404bd\nvision: v1p3beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605599\n\n78f61482cd028fc1d9892aa5d89d768666a954cd\nvision: v1p1beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605125\n\n60bb5a294a604fd1778c7ec87b265d13a7106171\nvision: v1p2beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604980\n\n3bcf7aa79d45eb9ec29ab9036e9359ea325a7fc3\nvision: v1p4beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604656\n\n2717b8a1c762b26911b45ecc2e4ee01d98401b28\nFix dataproc artman client library generation.\n\nPiperOrigin-RevId: 292555664\n\n7ac66d9be8a7d7de4f13566d8663978c9ee9dcd7\nAdd Dataproc Autoscaling API to V1.\n\nPiperOrigin-RevId: 292450564\n\n5d932b2c1be3a6ef487d094e3cf5c0673d0241dd\n- Improve documentation\n- Add a client_id field to StreamingPullRequest\n\nPiperOrigin-RevId: 292434036\n\neaff9fa8edec3e914995ce832b087039c5417ea7\nmonitoring: v3 publish annotations and client retry config\n\nPiperOrigin-RevId: 
292425288\n\n70958bab8c5353870d31a23fb2c40305b050d3fe\nBigQuery Storage Read API v1 clients.\n\nPiperOrigin-RevId: 292407644\n\n7a15e7fe78ff4b6d5c9606a3264559e5bde341d1\nUpdate backend proto for Google Cloud Endpoints\n\nPiperOrigin-RevId: 292391607\n\n3ca2c014e24eb5111c8e7248b1e1eb833977c83d\nbazel: Add --flaky_test_attempts=3 argument to prevent CI failures caused by flaky tests\n\nPiperOrigin-RevId: 292382559\n\n9933347c1f677e81e19a844c2ef95bfceaf694fe\nbazel:Integrate latest protoc-java-resource-names-plugin changes (fix for PyYAML dependency in bazel rules)\n\nPiperOrigin-RevId: 292376626\n\nb835ab9d2f62c88561392aa26074c0b849fb0bd3\nasset: v1p2beta1 add client config annotations\n\n* remove unintentionally exposed RPCs\n* remove messages relevant to removed RPCs\n\nPiperOrigin-RevId: 292369593\n\n" } }, { @@ -46,316 +46,5 @@ "config": "google/cloud/dataproc/artman_dataproc_v1.yaml" } } - ], - "newFiles": [ - { - "path": ".coveragerc" - }, - { - "path": ".flake8" - }, - { - "path": ".repo-metadata.json" - }, - { - "path": "CHANGELOG.md" - }, - { - "path": "LICENSE" - }, - { - "path": "MANIFEST.in" - }, - { - "path": "README.rst" - }, - { - "path": "docs/README.rst" - }, - { - "path": "docs/_static/custom.css" - }, - { - "path": "docs/_templates/layout.html" - }, - { - "path": "docs/changelog.md" - }, - { - "path": "docs/conf.py" - }, - { - "path": "docs/gapic/v1/api.rst" - }, - { - "path": "docs/gapic/v1/types.rst" - }, - { - "path": "docs/gapic/v1beta2/api.rst" - }, - { - "path": "docs/gapic/v1beta2/types.rst" - }, - { - "path": "docs/index.rst" - }, - { - "path": "google/__init__.py" - }, - { - "path": "google/cloud/__init__.py" - }, - { - "path": "google/cloud/dataproc.py" - }, - { - "path": "google/cloud/dataproc_v1/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/cluster_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py" 
- }, - { - "path": "google/cloud/dataproc_v1/gapic/enums.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/job_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/job_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/workflow_template_service_client.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/autoscaling_policies.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/clusters.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/clusters_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/jobs.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/jobs_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/operations.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/operations_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/operations_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/shared.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/shared_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/shared_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/workflow_templates.proto" - }, - { - 
"path": "google/cloud/dataproc_v1/proto/workflow_templates_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/types.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/enums.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/job_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py" - }, - { - "path": 
"google/cloud/dataproc_v1beta2/proto/clusters.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/clusters_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/jobs.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/jobs_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/operations.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/operations_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/shared.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/shared_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/types.py" - }, - { - "path": "noxfile.py" - }, - { - "path": "setup.cfg" - }, - { - "path": "setup.py" - }, - { - "path": "synth.metadata" - }, - { - "path": "synth.py" - }, - { - "path": "tests/system/gapic/v1/test_system_cluster_controller_v1.py" - }, - { - "path": "tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1/test_cluster_controller_client_v1.py" - }, - { - "path": "tests/unit/gapic/v1/test_job_controller_client_v1.py" - }, - { - "path": "tests/unit/gapic/v1/test_workflow_template_service_client_v1.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py" - }, - { - "path": 
"tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py" - } ] } \ No newline at end of file diff --git a/dataproc/tests/unit/gapic/v1/test_autoscaling_policy_service_client_v1.py b/dataproc/tests/unit/gapic/v1/test_autoscaling_policy_service_client_v1.py new file mode 100644 index 000000000000..64f2c43eb9aa --- /dev/null +++ b/dataproc/tests/unit/gapic/v1/test_autoscaling_policy_service_client_v1.py @@ -0,0 +1,281 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""Unit tests."""

import mock
import pytest

from google.cloud import dataproc_v1
from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2
from google.protobuf import empty_pb2


class MultiCallableStub(object):
    """Stub for the grpc.UnaryUnaryMultiCallable interface.

    Records every invocation on the owning ``ChannelStub`` and replays the
    next queued response (raising it if it is an exception instance).
    """

    def __init__(self, method, channel_stub):
        self.method = method
        self.channel_stub = channel_stub

    def __call__(self, request, timeout=None, metadata=None, credentials=None):
        # Record the call so tests can assert on the request that was sent.
        self.channel_stub.requests.append((self.method, request))

        response = None
        if self.channel_stub.responses:
            response = self.channel_stub.responses.pop()

        if isinstance(response, Exception):
            raise response

        if response:
            return response


class ChannelStub(object):
    """Stub for the grpc.Channel interface.

    Holds a queue of canned ``responses`` (consumed LIFO by
    ``MultiCallableStub``) and records each (method, request) pair in
    ``requests``.
    """

    def __init__(self, responses=None):
        # NOTE: use a None sentinel instead of a mutable default argument
        # (``responses=[]``); a shared default list would leak queued
        # responses between stub instances across test cases.
        self.responses = [] if responses is None else responses
        self.requests = []

    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
        return MultiCallableStub(method, self)


class CustomException(Exception):
    pass


class TestAutoscalingPolicyServiceClient(object):
    def test_create_autoscaling_policy(self):
        # Setup Expected Response
        id_ = "id3355"
        name = "name3373707"
        expected_response = {"id": id_, "name": name}
        expected_response = autoscaling_policies_pb2.AutoscalingPolicy(
            **expected_response
        )

        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup Request
        parent = client.region_path("[PROJECT]", "[REGION]")
        policy = {}

        response = client.create_autoscaling_policy(parent, policy)
        assert expected_response == response

        assert len(channel.requests) == 1
        expected_request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest(
            parent=parent, policy=policy
        )
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request

    def test_create_autoscaling_policy_exception(self):
        # Mock the API response
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup request
        parent = client.region_path("[PROJECT]", "[REGION]")
        policy = {}

        with pytest.raises(CustomException):
            client.create_autoscaling_policy(parent, policy)

    def test_update_autoscaling_policy(self):
        # Setup Expected Response
        id_ = "id3355"
        name = "name3373707"
        expected_response = {"id": id_, "name": name}
        expected_response = autoscaling_policies_pb2.AutoscalingPolicy(
            **expected_response
        )

        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup Request
        policy = {}

        response = client.update_autoscaling_policy(policy)
        assert expected_response == response

        assert len(channel.requests) == 1
        expected_request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(
            policy=policy
        )
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request

    def test_update_autoscaling_policy_exception(self):
        # Mock the API response
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup request
        policy = {}

        with pytest.raises(CustomException):
            client.update_autoscaling_policy(policy)

    def test_get_autoscaling_policy(self):
        # Setup Expected Response
        id_ = "id3355"
        name_2 = "name2-1052831874"
        expected_response = {"id": id_, "name": name_2}
        expected_response = autoscaling_policies_pb2.AutoscalingPolicy(
            **expected_response
        )

        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup Request
        name = client.autoscaling_policy_path(
            "[PROJECT]", "[REGION]", "[AUTOSCALING_POLICY]"
        )

        response = client.get_autoscaling_policy(name)
        assert expected_response == response

        assert len(channel.requests) == 1
        expected_request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(
            name=name
        )
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request

    def test_get_autoscaling_policy_exception(self):
        # Mock the API response
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup request
        name = client.autoscaling_policy_path(
            "[PROJECT]", "[REGION]", "[AUTOSCALING_POLICY]"
        )

        with pytest.raises(CustomException):
            client.get_autoscaling_policy(name)

    def test_list_autoscaling_policies(self):
        # Setup Expected Response
        next_page_token = ""
        policies_element = {}
        policies = [policies_element]
        expected_response = {"next_page_token": next_page_token, "policies": policies}
        expected_response = autoscaling_policies_pb2.ListAutoscalingPoliciesResponse(
            **expected_response
        )

        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup Request
        parent = client.region_path("[PROJECT]", "[REGION]")

        paged_list_response = client.list_autoscaling_policies(parent)
        resources = list(paged_list_response)
        assert len(resources) == 1

        assert expected_response.policies[0] == resources[0]

        assert len(channel.requests) == 1
        expected_request = autoscaling_policies_pb2.ListAutoscalingPoliciesRequest(
            parent=parent
        )
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request

    def test_list_autoscaling_policies_exception(self):
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup request
        parent = client.region_path("[PROJECT]", "[REGION]")

        paged_list_response = client.list_autoscaling_policies(parent)
        with pytest.raises(CustomException):
            list(paged_list_response)

    def test_delete_autoscaling_policy(self):
        channel = ChannelStub()
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup Request
        name = client.autoscaling_policy_path(
            "[PROJECT]", "[REGION]", "[AUTOSCALING_POLICY]"
        )

        client.delete_autoscaling_policy(name)

        assert len(channel.requests) == 1
        expected_request = autoscaling_policies_pb2.DeleteAutoscalingPolicyRequest(
            name=name
        )
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request

    def test_delete_autoscaling_policy_exception(self):
        # Mock the API response
        channel = ChannelStub(responses=[CustomException()])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = dataproc_v1.AutoscalingPolicyServiceClient()

        # Setup request
        name = client.autoscaling_policy_path(
            "[PROJECT]", "[REGION]", "[AUTOSCALING_POLICY]"
        )

        with pytest.raises(CustomException):
            client.delete_autoscaling_policy(name)