Skip to content
This repository was archived by the owner on Nov 29, 2023. It is now read-only.

Commit df1f2b2

Browse files
jameswinegarengelke
authored and committed
Update submit_job_to_cluster.py [(#1708)](GoogleCloudPlatform/python-docs-samples#1708)
switch region to new 'global' region and remove unnecessary function.
1 parent 501b1d0 commit df1f2b2

File tree

1 file changed

+1
-9
lines changed

1 file changed

+1
-9
lines changed

samples/snippets/submit_job_to_cluster.py

+1 −9
Original file line number | Diff line number | Diff line change
@@ -34,14 +34,6 @@ def get_pyspark_file(filename):
3434
return f, os.path.basename(filename)
3535

3636

37-
def get_region_from_zone(zone):
38-
try:
39-
region_as_list = zone.split('-')[:-1]
40-
return '-'.join(region_as_list)
41-
except (AttributeError, IndexError, ValueError):
42-
raise ValueError('Invalid zone provided, please check your input.')
43-
44-
4537
def upload_pyspark_file(project_id, bucket_name, filename, file):
4638
"""Uploads the PySpark file in this directory to the configured
4739
input bucket."""
@@ -199,7 +191,7 @@ def get_client():
199191
def main(project_id, zone, cluster_name, bucket_name,
200192
pyspark_file=None, create_new_cluster=True):
201193
dataproc = get_client()
202-
region = get_region_from_zone(zone)
194+
region = 'global'
203195
try:
204196
if pyspark_file:
205197
spark_file, spark_filename = get_pyspark_file(pyspark_file)

0 commit comments

Comments
 (0)