diff --git a/samples/snippets/list_clusters.py b/samples/snippets/list_clusters.py
index 3ecb8aeb..11acdf5d 100644
--- a/samples/snippets/list_clusters.py
+++ b/samples/snippets/list_clusters.py
@@ -19,7 +19,6 @@
 import argparse
 
 from google.cloud import dataproc_v1
-from google.cloud.dataproc_v1.gapic.transports import cluster_controller_grpc_transport
 
 
 # [START dataproc_list_clusters]
@@ -28,14 +27,7 @@ def list_clusters(dataproc, project, region):
     for cluster in dataproc.list_clusters(
         request={"project_id": project, "region": region}
     ):
-        print(
-            (
-                "{} - {}".format(
-                    cluster.cluster_name,
-                    cluster.status.state.name
-                )
-            )
-        )
+        print(("{} - {}".format(cluster.cluster_name, cluster.status.state.name)))
 
 
 # [END dataproc_list_clusters]
@@ -49,12 +41,9 @@ def main(project_id, region):
     else:
         # Use a regional gRPC endpoint. See:
         # https://cloud.google.com/dataproc/docs/concepts/regional-endpoints
-        client_transport = (
-            cluster_controller_grpc_transport.ClusterControllerGrpcTransport(
-                address="{}-dataproc.googleapis.com:443".format(region)
-            )
+        dataproc_cluster_client = dataproc_v1.ClusterControllerClient(
+            client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
         )
-        dataproc_cluster_client = dataproc_v1.ClusterControllerClient(client_transport)
 
     list_clusters(dataproc_cluster_client, project_id, region)
 
diff --git a/samples/snippets/noxfile_config.py b/samples/snippets/noxfile_config.py
new file mode 100644
index 00000000..646d77de
--- /dev/null
+++ b/samples/snippets/noxfile_config.py
@@ -0,0 +1,42 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Default TEST_CONFIG_OVERRIDE for python repos.
+
+# You can copy this file into your directory, then it will be imported from
+# the noxfile.py.
+
+# The source of truth:
+# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py
+
+TEST_CONFIG_OVERRIDE = {
+    # You can opt out from the test for specific Python versions.
+    "ignored_versions": ["2.7", "3.6"],
+    # Old samples are opted out of enforcing Python type hints.
+    # All new samples should feature them.
+    # "enforce_type_hints": True,
+    # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+    # build-specific Cloud project. You can also use your own string
+    # to use your own Cloud project.
+    # "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+    "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
+    # If you need to use a specific version of pip,
+    # change pip_version_override to the string representation
+    # of the version number, for example, "20.2.4".
+    "pip_version_override": None,
+    # A dictionary you want to inject into your test. Don't put any
+    # secrets here. These values will override predefined values.
+ "envs": {}, +} diff --git a/samples/snippets/submit_job_to_cluster.py b/samples/snippets/submit_job_to_cluster.py index b1024675..3b284368 100644 --- a/samples/snippets/submit_job_to_cluster.py +++ b/samples/snippets/submit_job_to_cluster.py @@ -34,8 +34,6 @@ from google.cloud import dataproc_v1 from google.cloud import storage -from google.cloud.dataproc_v1.gapic.transports import cluster_controller_grpc_transport -from google.cloud.dataproc_v1.gapic.transports import job_controller_grpc_transport DEFAULT_FILENAME = "pyspark_sort.py" @@ -77,10 +75,8 @@ def download_output(project, cluster_id, output_bucket, job_id): print("Downloading output file.") client = storage.Client(project=project) bucket = client.get_bucket(output_bucket) - output_blob = ( - "google-cloud-dataproc-metainfo/{}/jobs/{}/driveroutput.000000000".format( - cluster_id, job_id - ) + output_blob = "google-cloud-dataproc-metainfo/{}/jobs/{}/driveroutput.000000000".format( + cluster_id, job_id ) return bucket.blob(output_blob).download_as_string() @@ -135,14 +131,7 @@ def list_clusters_with_details(dataproc, project, region): for cluster in dataproc.list_clusters( request={"project_id": project, "region": region} ): - print( - ( - "{} - {}".format( - cluster.cluster_name, - cluster.status.state.name, - ) - ) - ) + print(("{} - {}".format(cluster.cluster_name, cluster.status.state.name,))) # [END dataproc_list_clusters_with_detail] @@ -232,16 +221,12 @@ def main( region = get_region_from_zone(zone) # Use a regional gRPC endpoint. See: # https://cloud.google.com/dataproc/docs/concepts/regional-endpoints - client_transport = ( - cluster_controller_grpc_transport.ClusterControllerGrpcTransport( - address="{}-dataproc.googleapis.com:443".format(region) - ) + dataproc_cluster_client = dataproc_v1.ClusterControllerClient( + client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"} ) - job_transport = job_controller_grpc_transport.JobControllerGrpcTransport( - address="{}-dataproc.googleapis.com:443".format(region) + dataproc_job_client = dataproc_v1.ClusterControllerClient( + client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"} ) - dataproc_cluster_client = dataproc_v1.ClusterControllerClient(client_transport) - dataproc_job_client = dataproc_v1.JobControllerClient(job_transport) # [END dataproc_get_client] try: diff --git a/samples/snippets/update_cluster.py b/samples/snippets/update_cluster.py index f4520224..bae6eee2 100644 --- a/samples/snippets/update_cluster.py +++ b/samples/snippets/update_cluster.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + # Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License");