Skip to content

Commit

Permalink
Improve and fix Cloud Spanner samples that transfer marketing budget (#2198)
Browse files Browse the repository at this point in the history

The samples that transfer part of an album's marketing budget had some issues:

+ `read_write_transaction`: Compared `second_album_budget` with an arbitrary integer, rather than explicitly checking against `transfer_amount`.
+ `write_with_dml_transaction`: Moved money from album 1 to album 2, even though `read_write_transaction` was the other way around. Also retrieved album 1's budget where it should have retrieved album 2's budget.

This change fixes those issues and updates the tests accordingly.
  • Loading branch information
hegemonic authored and jsimonweb committed Jun 19, 2019
1 parent e124961 commit d964673
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 15 deletions.
28 changes: 14 additions & 14 deletions spanner/cloud-client/snippets.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,7 +414,7 @@ def update_albums(transaction):

transfer_amount = 200000

if second_album_budget < 300000:
if second_album_budget < transfer_amount:
# Raising an exception will automatically roll back the
# transaction.
raise ValueError(
Expand Down Expand Up @@ -965,7 +965,7 @@ def query_data_with_parameter(instance_id, database_id):


def write_with_dml_transaction(instance_id, database_id):
""" Transfers a marketing budget from one album to another. """
""" Transfers part of a marketing budget from one album to another. """
# [START spanner_dml_getting_started_update]
# instance_id = "your-spanner-instance"
# database_id = "your-spanner-db-id"
Expand All @@ -977,28 +977,28 @@ def write_with_dml_transaction(instance_id, database_id):
def transfer_budget(transaction):
# Transfer marketing budget from one album to another. Performed in a
# single transaction to ensure that the transfer is atomic.
first_album_result = transaction.execute_sql(
second_album_result = transaction.execute_sql(
"SELECT MarketingBudget from Albums "
"WHERE SingerId = 1 and AlbumId = 1"
"WHERE SingerId = 2 and AlbumId = 2"
)
first_album_row = list(first_album_result)[0]
first_album_budget = first_album_row[0]
second_album_row = list(second_album_result)[0]
second_album_budget = second_album_row[0]

transfer_amount = 300000
transfer_amount = 200000

# Transaction will only be committed if this condition still holds at
# the time of commit. Otherwise it will be aborted and the callable
# will be rerun by the client library
if first_album_budget >= transfer_amount:
second_album_result = transaction.execute_sql(
if second_album_budget >= transfer_amount:
first_album_result = transaction.execute_sql(
"SELECT MarketingBudget from Albums "
"WHERE SingerId = 1 and AlbumId = 1"
)
second_album_row = list(second_album_result)[0]
second_album_budget = second_album_row[0]
first_album_row = list(first_album_result)[0]
first_album_budget = first_album_row[0]

first_album_budget -= transfer_amount
second_album_budget += transfer_amount
second_album_budget -= transfer_amount
first_album_budget += transfer_amount

# Update first album
transaction.execute_update(
Expand All @@ -1018,7 +1018,7 @@ def transfer_budget(transaction):
param_types={"AlbumBudget": spanner.param_types.INT64}
)

print("Transferred {} from Album1's budget to Album2's".format(
print("Transferred {} from Album2's budget to Album1's".format(
transfer_amount))

database.run_in_transaction(transfer_budget)
Expand Down
2 changes: 1 addition & 1 deletion spanner/cloud-client/snippets_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,7 +272,7 @@ def test_query_data_with_parameter(capsys):
def test_write_with_dml_transaction(capsys):
snippets.write_with_dml_transaction(INSTANCE_ID, DATABASE_ID)
out, _ = capsys.readouterr()
assert "Transferred 300000 from Album1's budget to Album2's" in out
assert "Transferred 200000 from Album2's budget to Album1's" in out


def update_data_with_partitioned_dml(capsys):
Expand Down

0 comments on commit d964673

Please sign in to comment.