Skip to content
This repository has been archived by the owner on Nov 29, 2023. It is now read-only.

Commit

Permalink
fix: add mains to samples [(#3284)](GoogleCloudPlatform/python-docs-samples#3284)
Browse files Browse the repository at this point in the history

Added mains to two samples: create_cluster and instantiate_inline_workflow_templates.

Fixed their associated tests to accommodate this.

Removed subprocess from quickstart/quickstart_test.py to fix [#2873](GoogleCloudPlatform/python-docs-samples#2873)

fixes #2873
  • Loading branch information
bradmiro committed Apr 14, 2020
1 parent 2b75ceb commit 6c2132c
Show file tree
Hide file tree
Showing 4 changed files with 37 additions and 17 deletions.
20 changes: 18 additions & 2 deletions samples/snippets/create_cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,12 @@

# This sample walks a user through creating a Cloud Dataproc cluster using
# the Python client library.
#
# This script can be run on its own:
# python create_cluster.py ${PROJECT_ID} ${REGION} ${CLUSTER_NAME}


import sys

# [START dataproc_create_cluster]
from google.cloud import dataproc_v1 as dataproc
Expand All @@ -33,7 +39,7 @@ def create_cluster(project_id, region, cluster_name):

# Create a client with the endpoint set to the desired cluster region.
cluster_client = dataproc.ClusterControllerClient(client_options={
'api_endpoint': '{}-dataproc.googleapis.com:443'.format(region)
'api_endpoint': f'{region}-dataproc.googleapis.com:443',
})

# Create the cluster config.
Expand All @@ -57,5 +63,15 @@ def create_cluster(project_id, region, cluster_name):
result = operation.result()

# Output a success message.
print('Cluster created successfully: {}'.format(result.cluster_name))
print(f'Cluster created successfully: {result.cluster_name}')
# [END dataproc_create_cluster]


if __name__ == "__main__":
    # Command-line entry point:
    #   python create_cluster.py <project_id> <region> <cluster_name>
    # Exit with a usage message when any argument is missing.
    if len(sys.argv) < 4:
        sys.exit('python create_cluster.py project_id region cluster_name')

    create_cluster(sys.argv[1], sys.argv[2], sys.argv[3])
2 changes: 1 addition & 1 deletion samples/snippets/create_cluster_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def teardown():
yield

cluster_client = dataproc.ClusterControllerClient(client_options={
'api_endpoint': '{}-dataproc.googleapis.com:443'.format(REGION)
'api_endpoint': f'{REGION}-dataproc.googleapis.com:443'
})
# Client library function
operation = cluster_client.delete_cluster(PROJECT_ID, REGION, CLUSTER_NAME)
Expand Down
17 changes: 13 additions & 4 deletions samples/snippets/instantiate_inline_workflow_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,11 @@
# workflow for Cloud Dataproc using the Python client library.
#
# This script can be run on its own:
# python workflows.py ${PROJECT_ID} ${REGION}
# python instantiate_inline_workflow_template.py ${PROJECT_ID} ${REGION}


import sys

# [START dataproc_instantiate_inline_workflow_template]
from google.cloud import dataproc_v1 as dataproc

Expand All @@ -35,7 +37,8 @@ def instantiate_inline_workflow_template(project_id, region):
# Create a client with the endpoint set to the desired region.
workflow_template_client = dataproc.WorkflowTemplateServiceClient(
client_options={
'api_endpoint': '{}-dataproc.googleapis.com:443'.format(region)}
'api_endpoint': f'{region}-dataproc.googleapis.com:443'
}
)

parent = workflow_template_client.region_path(project_id, region)
Expand Down Expand Up @@ -91,8 +94,14 @@ def instantiate_inline_workflow_template(project_id, region):

# Output a success message.
print('Workflow ran successfully.')
# [END dataproc_instantiate_inline_workflow_template]
# [END dataproc_instantiate_inline_workflow_template]


if __name__ == "__main__":
    # Command-line entry point:
    #   python instantiate_inline_workflow_template.py <project_id> <region>
    #
    # Bug fix: a leftover line called the function with sys.argv[1]/[2]
    # BEFORE the argument-count check, so running with too few arguments
    # raised IndexError instead of printing the usage message. Validate
    # argv first, then call.
    if len(sys.argv) < 3:
        sys.exit('python instantiate_inline_workflow_template.py '
                 + 'project_id region')

    project_id = sys.argv[1]
    region = sys.argv[2]
    instantiate_inline_workflow_template(project_id, region)
15 changes: 5 additions & 10 deletions samples/snippets/quickstart/quickstart_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,12 @@
import os
import uuid
import pytest
import subprocess

from google.cloud import dataproc_v1 as dataproc
from google.cloud import storage

import quickstart


PROJECT_ID = os.environ['GCLOUD_PROJECT']
REGION = 'us-central1'
Expand Down Expand Up @@ -60,15 +61,9 @@ def setup_teardown():
bucket.delete()


def test_quickstart():
command = [
'python', 'quickstart/quickstart.py',
'--project_id', PROJECT_ID,
'--region', REGION,
'--cluster_name', CLUSTER_NAME,
'--job_file_path', JOB_FILE_PATH
]
out = subprocess.check_output(command).decode("utf-8")
def test_quickstart(capsys):
    # Run the sample in-process (replacing the old subprocess invocation)
    # and use pytest's built-in capsys fixture to capture stdout.
    quickstart.quickstart(PROJECT_ID, REGION, CLUSTER_NAME, JOB_FILE_PATH)
    out, _ = capsys.readouterr()

    # The sample prints progress messages; verify the key milestones appeared.
    assert 'Cluster created successfully' in out
    assert 'Submitted job' in out
Expand Down

0 comments on commit 6c2132c

Please sign in to comment.