Skip to content
This repository has been archived by the owner on Nov 29, 2023. It is now read-only.

Commit

Permalink
Update submit_job_to_cluster.py [(#1708)](GoogleCloudPlatform/python-…
Browse files Browse the repository at this point in the history
…docs-samples#1708)

Switch the region to the new 'global' region and remove an unnecessary function.
  • Loading branch information
jameswinegar authored and engelke committed Nov 20, 2018
1 parent 501b1d0 commit df1f2b2
Showing 1 changed file with 1 addition and 9 deletions.
10 changes: 1 addition & 9 deletions samples/snippets/submit_job_to_cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,14 +34,6 @@ def get_pyspark_file(filename):
return f, os.path.basename(filename)


def get_region_from_zone(zone):
    """Derive the Compute Engine region name from a zone name.

    A zone such as ``'us-central1-a'`` belongs to region ``'us-central1'``:
    the region is the zone name with its trailing ``-<letter>`` suffix
    removed.

    Args:
        zone: Zone name string, e.g. ``'us-central1-a'``.

    Returns:
        The region name, e.g. ``'us-central1'``.

    Raises:
        ValueError: If ``zone`` is not a string containing at least two
            ``'-'``-separated components.
    """
    try:
        region_as_list = zone.split('-')[:-1]
        # A zone with no '-' yields an empty list here; without this check
        # the join below would silently return '' instead of failing.
        if not region_as_list:
            raise ValueError
        return '-'.join(region_as_list)
    except (AttributeError, IndexError, ValueError):
        raise ValueError('Invalid zone provided, please check your input.')


def upload_pyspark_file(project_id, bucket_name, filename, file):
"""Uploads the PySpark file in this directory to the configured
input bucket."""
Expand Down Expand Up @@ -199,7 +191,7 @@ def get_client():
def main(project_id, zone, cluster_name, bucket_name,
pyspark_file=None, create_new_cluster=True):
dataproc = get_client()
region = get_region_from_zone(zone)
region = 'global'
try:
if pyspark_file:
spark_file, spark_filename = get_pyspark_file(pyspark_file)
Expand Down

0 comments on commit df1f2b2

Please sign in to comment.