Skip to content

Commit

Permalink
docs(dataproc): remove 'cloud' from references to dataproc; add py2 deprecation warning; add 3.8 tests (via synth) (#10066)
Browse files Browse the repository at this point in the history
  • Loading branch information
yoshi-automation committed Jan 29, 2020
1 parent ee342a8 commit 13d8a87
Show file tree
Hide file tree
Showing 21 changed files with 276 additions and 273 deletions.
2 changes: 1 addition & 1 deletion dataproc/docs/_static/custom.css
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
div#python2-eol {
border-color: red;
border-width: medium;
}
}
1 change: 1 addition & 0 deletions dataproc/docs/_templates/layout.html
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@

{% extends "!layout.html" %}
{%- block content %}
{%- if theme_fixed_sidebar|lower == 'true' %}
Expand Down
11 changes: 11 additions & 0 deletions dataproc/google/cloud/dataproc_v1/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@


from __future__ import absolute_import
import sys
import warnings

from google.cloud.dataproc_v1 import types
from google.cloud.dataproc_v1.gapic import cluster_controller_client
Expand All @@ -24,6 +26,15 @@
from google.cloud.dataproc_v1.gapic import workflow_template_service_client


if sys.version_info[:2] == (2, 7):
message = (
"A future version of this library will drop support for Python 2.7."
"More details about Python 2 support for Google Cloud Client Libraries"
"can be found at https://cloud.google.com/python/docs/python2-sunset/"
)
warnings.warn(message, DeprecationWarning)


class ClusterControllerClient(cluster_controller_client.ClusterControllerClient):
__doc__ = cluster_controller_client.ClusterControllerClient.__doc__
enums = enums
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,7 @@ def create_cluster(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the cluster
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
cluster (Union[dict, ~google.cloud.dataproc_v1.types.Cluster]): Required. The cluster to create.
If a dict is provided, it must be of the same form as the protobuf
Expand Down Expand Up @@ -346,7 +346,7 @@ def update_cluster(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project the
cluster belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
cluster_name (str): Required. The cluster name.
cluster (Union[dict, ~google.cloud.dataproc_v1.types.Cluster]): Required. The changes to the cluster.
Expand Down Expand Up @@ -529,7 +529,7 @@ def delete_cluster(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the cluster
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
cluster_name (str): Required. The cluster name.
cluster_uuid (str): Optional. Specifying the ``cluster_uuid`` means the RPC should fail
(with error NOT\_FOUND) if cluster with specified UUID does not exist.
Expand Down Expand Up @@ -622,7 +622,7 @@ def get_cluster(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the cluster
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
cluster_name (str): Required. The cluster name.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
Expand Down Expand Up @@ -702,7 +702,7 @@ def list_clusters(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the cluster
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
filter_ (str): Optional. A filter constraining the clusters to list. Filters are
case-sensitive and have the following syntax:
Expand Down Expand Up @@ -823,7 +823,7 @@ def diagnose_cluster(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the cluster
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
cluster_name (str): Required. The cluster name.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
Expand Down
4 changes: 2 additions & 2 deletions dataproc/google/cloud/dataproc_v1/gapic/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ class Substate(enum.IntEnum):
Applies to RUNNING state.
STALE_STATUS (int): The agent-reported status is out of date (may occur if
Cloud Dataproc loses communication with Agent).
Dataproc loses communication with Agent).
Applies to RUNNING state.
"""
Expand Down Expand Up @@ -148,7 +148,7 @@ class Substate(enum.IntEnum):
Applies to RUNNING state.
STALE_STATUS (int): The agent-reported status is out of date, which may be caused by a
loss of communication between the agent and Cloud Dataproc. If the
loss of communication between the agent and Dataproc. If the
agent does not send a timely update, the job will fail.
Applies to RUNNING state.
Expand Down
12 changes: 6 additions & 6 deletions dataproc/google/cloud/dataproc_v1/gapic/job_controller_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ def submit_job(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the job
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
job (Union[dict, ~google.cloud.dataproc_v1.types.Job]): Required. The job resource.
If a dict is provided, it must be of the same form as the protobuf
Expand Down Expand Up @@ -304,7 +304,7 @@ def get_job(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the job
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
job_id (str): Required. The job ID.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
Expand Down Expand Up @@ -386,7 +386,7 @@ def list_jobs(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the job
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
Expand Down Expand Up @@ -507,7 +507,7 @@ def update_job(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the job
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
job_id (str): Required. The job ID.
job (Union[dict, ~google.cloud.dataproc_v1.types.Job]): Required. The changes to the job.
Expand Down Expand Up @@ -597,7 +597,7 @@ def cancel_job(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the job
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
job_id (str): Required. The job ID.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
Expand Down Expand Up @@ -668,7 +668,7 @@ def delete_job(
Args:
project_id (str): Required. The ID of the Google Cloud Platform project that the job
belongs to.
region (str): Required. The Cloud Dataproc region in which to handle the request.
region (str): Required. The Dataproc region in which to handle the request.
job_id (str): Required. The job ID.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@
class WorkflowTemplateServiceClient(object):
"""
The API interface for managing Workflow Templates in the
Cloud Dataproc API.
Dataproc API.
"""

SERVICE_ADDRESS = "dataproc.googleapis.com:443"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ option java_outer_classname = "AutoscalingPoliciesProto";
option java_package = "com.google.cloud.dataproc.v1";

// The API interface for managing autoscaling policies in the
// Google Cloud Dataproc API.
// Dataproc API.
service AutoscalingPolicyService {
option (google.api.default_host) = "dataproc.googleapis.com";
option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

class AutoscalingPolicyServiceStub(object):
"""The API interface for managing autoscaling policies in the
Google Cloud Dataproc API.
Dataproc API.
"""

def __init__(self, channel):
Expand Down Expand Up @@ -47,7 +47,7 @@ def __init__(self, channel):

class AutoscalingPolicyServiceServicer(object):
"""The API interface for managing autoscaling policies in the
Google Cloud Dataproc API.
Dataproc API.
"""

def CreateAutoscalingPolicy(self, request, context):
Expand Down
Loading

0 comments on commit 13d8a87

Please sign in to comment.