Skip to content

Commit

Permalink
Spanner Commit Timestamp Sample [(#1425)](GoogleCloudPlatform/python-…
Browse files Browse the repository at this point in the history
  • Loading branch information
kurtisvg authored and andrewsg committed Mar 27, 2018
1 parent 7aba51c commit a662787
Show file tree
Hide file tree
Showing 3 changed files with 215 additions and 2 deletions.
2 changes: 1 addition & 1 deletion samples/samples/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
google-cloud-spanner==1.2.0
google-cloud-spanner==1.3.0
futures==3.2.0; python_version < "3"
161 changes: 160 additions & 1 deletion samples/samples/snippets.py
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,145 @@ def read_only_transaction(instance_id, database_id):
# [END spanner_read_only_transaction]


if __name__ == '__main__':
# [START spanner_create_table_with_timestamp_column]
def create_table_with_timestamp(instance_id, database_id):
    """Creates a table with a COMMIT_TIMESTAMP column."""
    spanner_client = spanner.Client()
    database = spanner_client.instance(instance_id).database(database_id)

    # allow_commit_timestamp=true lets writes ask Cloud Spanner to store the
    # commit timestamp in LastUpdateTime.
    ddl = """CREATE TABLE Performances (
            SingerId INT64 NOT NULL,
            VenueId INT64 NOT NULL,
            EventDate Date,
            Revenue INT64,
            LastUpdateTime TIMESTAMP NOT NULL
            OPTIONS(allow_commit_timestamp=true)
        ) PRIMARY KEY (SingerId, VenueId, EventDate),
        INTERLEAVE IN PARENT Singers ON DELETE CASCADE"""
    operation = database.update_ddl([ddl])

    print('Waiting for operation to complete...')
    operation.result()

    print('Created Performances table on database {} on instance {}'.format(
        database_id, instance_id))
# [END spanner_create_table_with_timestamp_column]


# [START spanner_insert_data_with_timestamp_column]
def insert_data_with_timestamp(instance_id, database_id):
    """Inserts rows whose LastUpdateTime is the commit timestamp."""
    spanner_client = spanner.Client()
    database = spanner_client.instance(instance_id).database(database_id)

    # spanner.COMMIT_TIMESTAMP is a placeholder replaced by the actual
    # commit time when the batch is written.
    rows = [
        (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP),
        (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP),
        (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP),
    ]

    with database.batch() as batch:
        batch.insert(
            table='Performances',
            columns=(
                'SingerId', 'VenueId', 'EventDate',
                'Revenue', 'LastUpdateTime',),
            values=rows)

    print('Inserted data.')
# [END spanner_insert_data_with_timestamp_column]


# [START spanner_add_timestamp_column]
def add_timestamp_column(instance_id, database_id):
    """
    Adds a new TIMESTAMP column to the Albums table in the example database.
    """
    spanner_client = spanner.Client()
    database = spanner_client.instance(instance_id).database(database_id)

    # The OPTIONS clause marks the column as a commit-timestamp column.
    ddl = (
        'ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP '
        'OPTIONS(allow_commit_timestamp=true)')
    operation = database.update_ddl([ddl])

    print('Waiting for operation to complete...')
    operation.result()

    print('Altered table "Albums" on database {} on instance {}.'.format(
        database_id, instance_id))
# [END spanner_add_timestamp_column]


# [START spanner_update_data_with_timestamp_column]
def update_data_with_timestamp(instance_id, database_id):
    """Updates the Albums table, writing the commit timestamp into its
    LastUpdateTime column.

    This updates the `MarketingBudget` column which must be created before
    running this sample. You can add the column by running the `add_column`
    sample or by running this DDL statement against your database:

        ALTER TABLE Albums ADD COLUMN MarketingBudget INT64

    In addition this update expects the LastUpdateTime column added by
    applying this DDL statement against your database:

        ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP
        OPTIONS(allow_commit_timestamp=true)
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)

    database = instance.database(database_id)

    with database.batch() as batch:
        batch.update(
            table='Albums',
            columns=(
                'SingerId', 'AlbumId', 'MarketingBudget', 'LastUpdateTime'),
            values=[
                # spanner.COMMIT_TIMESTAMP is replaced with the commit time
                # when the batch is written.
                (1, 4, 11000, spanner.COMMIT_TIMESTAMP),
                (1, 19, 15000, spanner.COMMIT_TIMESTAMP),
                (2, 42, 7000, spanner.COMMIT_TIMESTAMP)])

    print('Updated data.')
# [END spanner_update_data_with_timestamp_column]


# [START spanner_query_data_with_timestamp_column]
def query_data_with_timestamp(instance_id, database_id):
    """Queries sample data from the Albums table using SQL, ordered by the
    `LastUpdateTime` commit-timestamp column (most recent first).

    The `LastUpdateTime` column must be created before running this sample.
    You can add it by running the `add_timestamp_column` sample or by
    running this DDL statement against your database:

        ALTER TABLE Albums ADD COLUMN LastUpdateTime TIMESTAMP
        OPTIONS (allow_commit_timestamp=true)
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)

    database = instance.database(database_id)

    # Read-only snapshot; ORDER BY references LastUpdateTime even though the
    # column itself is not in the select list.
    with database.snapshot() as snapshot:
        results = snapshot.execute_sql(
            'SELECT SingerId, AlbumId, AlbumTitle FROM Albums '
            'ORDER BY LastUpdateTime DESC')

    for row in results:
        print(u'SingerId: {}, AlbumId: {}, AlbumTitle: {}'.format(*row))
# [END spanner_query_data_with_timestamp_column]


if __name__ == '__main__': # noqa: C901
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
Expand Down Expand Up @@ -495,6 +633,17 @@ def read_only_transaction(instance_id, database_id):
subparsers.add_parser('add_storing_index', help=add_storing_index.__doc__)
subparsers.add_parser(
'read_data_with_storing_index', help=insert_data.__doc__)
subparsers.add_parser(
'create_table_with_timestamp',
help=create_table_with_timestamp.__doc__)
subparsers.add_parser(
'insert_data_with_timestamp', help=insert_data_with_timestamp.__doc__)
subparsers.add_parser(
'add_timestamp_column', help=add_timestamp_column.__doc__)
subparsers.add_parser(
'update_data_with_timestamp', help=update_data_with_timestamp.__doc__)
subparsers.add_parser(
'query_data_with_timestamp', help=query_data_with_timestamp.__doc__)

args = parser.parse_args()

Expand Down Expand Up @@ -530,3 +679,13 @@ def read_only_transaction(instance_id, database_id):
add_storing_index(args.instance_id, args.database_id)
elif args.command == 'read_data_with_storing_index':
read_data_with_storing_index(args.instance_id, args.database_id)
elif args.command == 'create_table_with_timestamp':
create_table_with_timestamp(args.instance_id, args.database_id)
elif args.command == 'insert_data_with_timestamp':
insert_data_with_timestamp(args.instance_id, args.database_id)
elif args.command == 'add_timestamp_column':
add_timestamp_column(args.instance_id, args.database_id)
elif args.command == 'update_data_with_timestamp':
update_data_with_timestamp(args.instance_id, args.database_id)
elif args.command == 'query_data_with_timestamp':
query_data_with_timestamp(args.instance_id, args.database_id)
54 changes: 54 additions & 0 deletions samples/samples/snippets_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,3 +180,57 @@ def _():
out, _ = capsys.readouterr()

assert 'Forever Hold Your Peace' in out


def test_create_table_with_timestamp(temporary_database, capsys):
    """The sample should report the name of the created table."""
    snippets.create_table_with_timestamp(
        SPANNER_INSTANCE, temporary_database.database_id)

    captured, _ = capsys.readouterr()
    assert 'Performances' in captured


def test_insert_data_with_timestamp(temporary_database, capsys):
    """The sample should confirm that rows were inserted."""
    snippets.insert_data_with_timestamp(
        SPANNER_INSTANCE, temporary_database.database_id)

    captured, _ = capsys.readouterr()
    assert 'Inserted data.' in captured


def test_add_timestamp_column(temporary_database, capsys):
    """The sample should report the altered table's name."""
    snippets.add_timestamp_column(
        SPANNER_INSTANCE, temporary_database.database_id)

    captured, _ = capsys.readouterr()
    assert 'Albums' in captured


@pytest.mark.slow
def test_update_data_with_timestamp(temporary_database, capsys):
    """The sample should confirm that rows were updated."""
    snippets.update_data_with_timestamp(
        SPANNER_INSTANCE, temporary_database.database_id)

    captured, _ = capsys.readouterr()
    assert 'Updated data.' in captured


@pytest.mark.slow
def test_query_data_with_timestamp(temporary_database, capsys):
    """Verifies that query_data_with_timestamp prints a known album title."""
    # NOTE(review): @eventually_consistent.call appears to invoke and retry
    # the wrapped function until its assertions pass (queried data may not be
    # immediately visible) — confirm against the helper's documentation.
    # capsys is read inside the retried closure so each attempt checks the
    # output produced by that attempt only.
    @eventually_consistent.call
    def _():
        snippets.query_data_with_timestamp(
            SPANNER_INSTANCE,
            temporary_database.database_id)

        out, _ = capsys.readouterr()

        assert 'Go, Go, Go' in out

0 comments on commit a662787

Please sign in to comment.