feat: add SparkR and Presto jobs to WorkflowTemplates; add new optional components; add submit_job_as_operation to v1 (via synth) (#21)
yoshi-automation committed May 19, 2020
1 parent ac2cc78 commit 1cf10b6b127a63dbeb34958771c2cc8d8cb37099
Showing with 4,961 additions and 3,485 deletions.
  1. +0 −2 .kokoro/publish-docs.sh
  2. +2 −2 docs/gapic/v1/api.rst
  3. +2 −2 docs/gapic/v1/types.rst
  4. +2 −2 docs/gapic/v1beta2/api.rst
  5. +2 −2 docs/gapic/v1beta2/types.rst
  6. +2 −0 docs/index.rst
  7. +7 −0 docs/multiprocessing.rst
  8. +1 −1 google/cloud/dataproc.py
  9. +8 −8 google/cloud/dataproc_v1/__init__.py
  10. +64 −63 google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py
  11. +4 −4 google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py
  12. +32 −30 google/cloud/dataproc_v1/gapic/cluster_controller_client.py
  13. +3 −3 google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py
  14. +6 −2 google/cloud/dataproc_v1/gapic/enums.py
  15. +111 −8 google/cloud/dataproc_v1/gapic/job_controller_client.py
  16. +12 −6 google/cloud/dataproc_v1/gapic/job_controller_client_config.py
  17. +11 −11 google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py
  18. +9 −9 google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py
  19. +21 −0 google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py
  20. +31 −17 google/cloud/dataproc_v1/gapic/workflow_template_service_client.py
  21. +1 −1 google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py
  22. +8 −3 google/cloud/dataproc_v1/proto/autoscaling_policies.proto
  23. +123 −151 google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py
  24. +44 −26 google/cloud/dataproc_v1/proto/clusters.proto
  25. +495 −497 google/cloud/dataproc_v1/proto/clusters_pb2.py
  26. +1 −1 google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py
  27. +38 −1 google/cloud/dataproc_v1/proto/jobs.proto
  28. +590 −450 google/cloud/dataproc_v1/proto/jobs_pb2.py
  29. +20 −0 google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py
  30. +38 −45 google/cloud/dataproc_v1/proto/operations_pb2.py
  31. +6 −0 google/cloud/dataproc_v1/proto/shared.proto
  32. +12 −11 google/cloud/dataproc_v1/proto/shared_pb2.py
  33. +6 −0 google/cloud/dataproc_v1/proto/workflow_templates.proto
  34. +399 −378 google/cloud/dataproc_v1/proto/workflow_templates_pb2.py
  35. +25 −14 google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py
  36. +1 −1 google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py
  37. +31 −31 google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py
  38. +6 −6 google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py
  39. +16 −2 google/cloud/dataproc_v1beta2/gapic/enums.py
  40. +109 −8 google/cloud/dataproc_v1beta2/gapic/job_controller_client.py
  41. +15 −9 google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py
  42. +1 −1 google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py
  43. +9 −9 google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py
  44. +21 −0 google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py
  45. +29 −17 google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
  46. +1 −1 google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py
  47. +6 −7 google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto
  48. +116 −147 google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py
  49. +78 −24 google/cloud/dataproc_v1beta2/proto/clusters.proto
  50. +689 −511 google/cloud/dataproc_v1beta2/proto/clusters_pb2.py
  51. +1 −1 google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py
  52. +87 −7 google/cloud/dataproc_v1beta2/proto/jobs.proto
  53. +922 −452 google/cloud/dataproc_v1beta2/proto/jobs_pb2.py
  54. +20 −0 google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py
  55. +26 −33 google/cloud/dataproc_v1beta2/proto/operations_pb2.py
  56. +12 −0 google/cloud/dataproc_v1beta2/proto/shared.proto
  57. +26 −17 google/cloud/dataproc_v1beta2/proto/shared_pb2.py
  58. +16 −5 google/cloud/dataproc_v1beta2/proto/workflow_templates.proto
  59. +388 −357 google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py
  60. +6 −15 synth.metadata
  61. +5 −0 synth.py
  62. +18 −26 tests/unit/gapic/v1/test_autoscaling_policy_service_client_v1.py
  63. +76 −0 tests/unit/gapic/v1/test_job_controller_client_v1.py
  64. +6 −18 tests/unit/gapic/v1/test_workflow_template_service_client_v1.py
  65. +4 −12 tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py
  66. +78 −0 tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py
  67. +6 −18 tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py
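
The headline v1 change in the file list above is the new JobControllerClient.submit_job_as_operation method (see the job_controller_client.py and job_controller_grpc_transport.py diffs below). A minimal sketch of calling it, assuming the generated method mirrors submit_job's (project_id, region, job) arguments and returns a long-running operation future; the project, region, cluster name, and script URI are placeholders, and the job payload sketches a SparkR job for illustration:

from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient()

# Placeholder job payload: a SparkR job pinned to an existing cluster.
job = {
    "placement": {"cluster_name": "my-cluster"},
    "spark_r_job": {"main_r_file_uri": "gs://my-bucket/my-script.R"},
}

# Assumed to return a google.api_core operation future rather than the Job itself.
operation = client.submit_job_as_operation("my-project", "us-central1", job)
finished_job = operation.result()  # block until the job completes
print(finished_job.status.state)
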
@@ -13,8 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

#!/bin/bash

set -eo pipefail

# Disable buffering, so that the logs stream through.
@@ -1,5 +1,5 @@
Client for Google Cloud Dataproc API
====================================
Client for Cloud Dataproc API
=============================

.. automodule:: google.cloud.dataproc_v1
:members:
@@ -1,5 +1,5 @@
Types for Google Cloud Dataproc API Client
==========================================
Types for Cloud Dataproc API Client
===================================

.. automodule:: google.cloud.dataproc_v1.types
:members:
@@ -1,5 +1,5 @@
Client for Google Cloud Dataproc API
====================================
Client for Cloud Dataproc API
=============================

.. automodule:: google.cloud.dataproc_v1beta2
:members:
@@ -1,5 +1,5 @@
Types for Google Cloud Dataproc API Client
==========================================
Types for Cloud Dataproc API Client
===================================

.. automodule:: google.cloud.dataproc_v1beta2.types
:members:
@@ -1,5 +1,7 @@
.. include:: README.rst

.. include:: multiprocessing.rst

API Reference
-------------
.. toctree::
@@ -0,0 +1,7 @@
.. note::

Because this client uses :mod:`grpcio` library, it is safe to
share instances across threads. In multiprocessing scenarios, the best
practice is to create client instances *after* the invocation of
:func:`os.fork` by :class:`multiprocessing.Pool` or
:class:`multiprocessing.Process`.
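
For illustration, a minimal sketch of the pattern the note above recommends; the pool size, worker function, and the list_clusters call are placeholder choices, not part of this change:

import multiprocessing

from google.cloud import dataproc_v1


def list_cluster_names(project_and_region):
    # Create the gRPC-backed client inside the worker, i.e. after the fork
    # performed by multiprocessing.Pool, as the note above advises.
    project_id, region = project_and_region
    client = dataproc_v1.ClusterControllerClient()
    return [cluster.cluster_name for cluster in client.list_clusters(project_id, region)]


if __name__ == "__main__":
    with multiprocessing.Pool(processes=2) as pool:
        names = pool.map(list_cluster_names, [("my-project", "us-central1")])
    print(names)
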
@@ -28,8 +28,8 @@
__all__ = (
"enums",
"types",
"AutoscalingPolicyServiceClient",
"ClusterControllerClient",
"JobControllerClient",
"WorkflowTemplateServiceClient",
"AutoscalingPolicyServiceClient",
)
@@ -36,6 +36,13 @@
warnings.warn(message, DeprecationWarning)


class AutoscalingPolicyServiceClient(
autoscaling_policy_service_client.AutoscalingPolicyServiceClient
):
__doc__ = autoscaling_policy_service_client.AutoscalingPolicyServiceClient.__doc__
enums = enums


class ClusterControllerClient(cluster_controller_client.ClusterControllerClient):
__doc__ = cluster_controller_client.ClusterControllerClient.__doc__
enums = enums
@@ -53,18 +60,11 @@ class WorkflowTemplateServiceClient(
enums = enums


class AutoscalingPolicyServiceClient(
autoscaling_policy_service_client.AutoscalingPolicyServiceClient
):
__doc__ = autoscaling_policy_service_client.AutoscalingPolicyServiceClient.__doc__
enums = enums


__all__ = (
"enums",
"types",
"AutoscalingPolicyServiceClient",
"ClusterControllerClient",
"JobControllerClient",
"WorkflowTemplateServiceClient",
"AutoscalingPolicyServiceClient",
)
@@ -38,17 +38,7 @@
)
from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2
from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2_grpc
from google.cloud.dataproc_v1.proto import clusters_pb2
from google.cloud.dataproc_v1.proto import clusters_pb2_grpc
from google.cloud.dataproc_v1.proto import jobs_pb2
from google.cloud.dataproc_v1.proto import jobs_pb2_grpc
from google.cloud.dataproc_v1.proto import operations_pb2 as proto_operations_pb2
from google.cloud.dataproc_v1.proto import workflow_templates_pb2
from google.cloud.dataproc_v1.proto import workflow_templates_pb2_grpc
from google.longrunning import operations_pb2 as longrunning_operations_pb2
from google.protobuf import duration_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2


_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version
@@ -88,15 +78,24 @@ def from_service_account_file(cls, filename, *args, **kwargs):
from_service_account_json = from_service_account_file

@classmethod
def autoscaling_policy_path(cls, project, region, autoscaling_policy):
def autoscaling_policy_path(cls, project, location, autoscaling_policy):
"""Return a fully-qualified autoscaling_policy string."""
return google.api_core.path_template.expand(
"projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}",
"projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}",
project=project,
region=region,
location=location,
autoscaling_policy=autoscaling_policy,
)

@classmethod
def location_path(cls, project, location):
"""Return a fully-qualified location string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}",
project=project,
location=location,
)

@classmethod
def region_path(cls, project, region):
"""Return a fully-qualified region string."""
@@ -217,41 +216,31 @@ def __init__(
self._inner_api_calls = {}

# Service calls
def create_autoscaling_policy(
def update_autoscaling_policy(
self,
parent,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates new autoscaling policy.
Updates (replaces) autoscaling policy.
Disabled check for update_mask, because all updates will be full
replacements.
Example:
>>> from google.cloud import dataproc_v1
>>>
>>> client = dataproc_v1.AutoscalingPolicyServiceClient()
>>>
>>> parent = client.region_path('[PROJECT]', '[REGION]')
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.create_autoscaling_policy(parent, policy)
>>> response = client.update_autoscaling_policy(policy)
Args:
parent (str): Required. The "resource name" of the region or location, as described in
https://cloud.google.com/apis/design/resource\_names.
- For ``projects.regions.autoscalingPolicies.create``, the resource
name of the region has the following format:
``projects/{project_id}/regions/{region}``
- For ``projects.locations.autoscalingPolicies.create``, the resource
name of the location has the following format:
``projects/{project_id}/locations/{location}``
policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): The autoscaling policy to create.
policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): Required. The updated autoscaling policy.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy`
@@ -275,24 +264,22 @@ def create_autoscaling_policy(
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_autoscaling_policy" not in self._inner_api_calls:
if "update_autoscaling_policy" not in self._inner_api_calls:
self._inner_api_calls[
"create_autoscaling_policy"
"update_autoscaling_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_autoscaling_policy,
default_retry=self._method_configs["CreateAutoscalingPolicy"].retry,
default_timeout=self._method_configs["CreateAutoscalingPolicy"].timeout,
self.transport.update_autoscaling_policy,
default_retry=self._method_configs["UpdateAutoscalingPolicy"].retry,
default_timeout=self._method_configs["UpdateAutoscalingPolicy"].timeout,
client_info=self._client_info,
)

request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest(
parent=parent, policy=policy
)
request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(policy=policy)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
routing_header = [("policy.name", policy.name)]
except AttributeError:
pass
else:
@@ -301,35 +288,45 @@ def create_autoscaling_policy(
)
metadata.append(routing_metadata)

return self._inner_api_calls["create_autoscaling_policy"](
return self._inner_api_calls["update_autoscaling_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)

def update_autoscaling_policy(
def create_autoscaling_policy(
self,
parent,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates (replaces) autoscaling policy.
Disabled check for update\_mask, because all updates will be full
replacements.
Creates new autoscaling policy.
Example:
>>> from google.cloud import dataproc_v1
>>>
>>> client = dataproc_v1.AutoscalingPolicyServiceClient()
>>>
>>> parent = client.region_path('[PROJECT]', '[REGION]')
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.update_autoscaling_policy(policy)
>>> response = client.create_autoscaling_policy(parent, policy)
Args:
policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): Required. The updated autoscaling policy.
parent (str): Required. The "resource name" of the region or location, as
described in https://cloud.google.com/apis/design/resource_names.
- For ``projects.regions.autoscalingPolicies.create``, the resource
name of the region has the following format:
``projects/{project_id}/regions/{region}``
- For ``projects.locations.autoscalingPolicies.create``, the resource
name of the location has the following format:
``projects/{project_id}/locations/{location}``
policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): Required. The autoscaling policy to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy`
@@ -353,22 +350,24 @@ def update_autoscaling_policy(
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "update_autoscaling_policy" not in self._inner_api_calls:
if "create_autoscaling_policy" not in self._inner_api_calls:
self._inner_api_calls[
"update_autoscaling_policy"
"create_autoscaling_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_autoscaling_policy,
default_retry=self._method_configs["UpdateAutoscalingPolicy"].retry,
default_timeout=self._method_configs["UpdateAutoscalingPolicy"].timeout,
self.transport.create_autoscaling_policy,
default_retry=self._method_configs["CreateAutoscalingPolicy"].retry,
default_timeout=self._method_configs["CreateAutoscalingPolicy"].timeout,
client_info=self._client_info,
)

request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(policy=policy)
request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest(
parent=parent, policy=policy
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("policy.name", policy.name)]
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
@@ -377,7 +376,7 @@ def update_autoscaling_policy(
)
metadata.append(routing_metadata)

return self._inner_api_calls["update_autoscaling_policy"](
return self._inner_api_calls["create_autoscaling_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)

@@ -396,13 +395,14 @@ def get_autoscaling_policy(
>>>
>>> client = dataproc_v1.AutoscalingPolicyServiceClient()
>>>
>>> name = client.autoscaling_policy_path('[PROJECT]', '[REGION]', '[AUTOSCALING_POLICY]')
>>> # TODO: Initialize `name`:
>>> name = ''
>>>
>>> response = client.get_autoscaling_policy(name)
Args:
name (str): Required. The "resource name" of the autoscaling policy, as described in
https://cloud.google.com/apis/design/resource\_names.
name (str): Required. The "resource name" of the autoscaling policy, as
described in https://cloud.google.com/apis/design/resource_names.
- For ``projects.regions.autoscalingPolicies.get``, the resource name
of the policy has the following format:
@@ -492,8 +492,8 @@ def list_autoscaling_policies(
... pass
Args:
parent (str): Required. The "resource name" of the region or location, as described in
https://cloud.google.com/apis/design/resource\_names.
parent (str): Required. The "resource name" of the region or location, as
described in https://cloud.google.com/apis/design/resource_names.
- For ``projects.regions.autoscalingPolicies.list``, the resource name
of the region has the following format:
@@ -587,13 +587,14 @@ def delete_autoscaling_policy(
>>>
>>> client = dataproc_v1.AutoscalingPolicyServiceClient()
>>>
>>> name = client.autoscaling_policy_path('[PROJECT]', '[REGION]', '[AUTOSCALING_POLICY]')
>>> # TODO: Initialize `name`:
>>> name = ''
>>>
>>> client.delete_autoscaling_policy(name)
Args:
name (str): Required. The "resource name" of the autoscaling policy, as described in
https://cloud.google.com/apis/design/resource\_names.
name (str): Required. The "resource name" of the autoscaling policy, as
described in https://cloud.google.com/apis/design/resource_names.
- For ``projects.regions.autoscalingPolicies.delete``, the resource
name of the policy has the following format:
@@ -17,14 +17,14 @@
}
},
"methods": {
"CreateAutoscalingPolicy": {
"UpdateAutoscalingPolicy": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"UpdateAutoscalingPolicy": {
"CreateAutoscalingPolicy": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetAutoscalingPolicy": {
