Skip to content

Commit

Permalink
feat: adding support and samples for jsonb (googleapis#851)
Browse files Browse the repository at this point in the history
* changes for testing in postgres

* changes for jsonb

* samples

* linting

* linting

* Revert "linting"

This reverts commit 8563815.

* Revert "linting"

This reverts commit 4910f59.

* Revert "samples"

This reverts commit ba80e5a.

* samples

* lint

* changes as per comments

* removing file

* changes as per review

* Update pg_snippets.py

* Update pg_snippets.py

* Update pg_snippets.py

* Update pg_snippets.py

* Update pg_snippets.py
  • Loading branch information
asthamohta committed Nov 7, 2022
1 parent 57cbf4d commit 268924d
Show file tree
Hide file tree
Showing 6 changed files with 185 additions and 2 deletions.
1 change: 1 addition & 0 deletions google/cloud/spanner_v1/param_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
NUMERIC = Type(code=TypeCode.NUMERIC)
JSON = Type(code=TypeCode.JSON)
PG_NUMERIC = Type(code=TypeCode.NUMERIC, type_annotation=TypeAnnotationCode.PG_NUMERIC)
PG_JSONB = Type(code=TypeCode.JSON, type_annotation=TypeAnnotationCode.PG_JSONB)


def Array(element_type):
Expand Down
128 changes: 128 additions & 0 deletions samples/samples/pg_snippets.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from google.cloud import spanner, spanner_admin_database_v1
from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect
from google.cloud.spanner_v1 import param_types
from google.cloud.spanner_v1.data_types import JsonObject

OPERATION_TIMEOUT_SECONDS = 240

Expand Down Expand Up @@ -1342,6 +1343,133 @@ def query_data_with_query_options(instance_id, database_id):
# [END spanner_postgresql_query_with_query_options]


# [START spanner_postgresql_jsonb_add_column]
def add_jsonb_column(instance_id, database_id):
    """Adds a JSONB column named VenueDetails to the Venues table.

    The Venues table must already exist. Create it by running the
    `create_table_with_datatypes` sample, or with this DDL statement:
      CREATE TABLE Venues (
      VenueId         BIGINT NOT NULL,
      VenueName       character varying(100),
      VenueInfo       BYTEA,
      Capacity        BIGINT,
      OutdoorVenue    BOOL,
      PopularityScore FLOAT8,
      Revenue         NUMERIC,
      LastUpdateTime  SPANNER.COMMIT_TIMESTAMP NOT NULL,
      PRIMARY KEY (VenueId))
    """
    # instance_id = "your-spanner-instance"
    # database_id = "your-spanner-db-id"

    client = spanner.Client()
    database = client.instance(instance_id).database(database_id)

    # Schema changes are long-running operations; update_ddl returns one.
    ddl_statements = ["ALTER TABLE Venues ADD COLUMN VenueDetails JSONB"]
    operation = database.update_ddl(ddl_statements)

    print("Waiting for operation to complete...")
    operation.result(OPERATION_TIMEOUT_SECONDS)

    print(
        'Altered table "Venues" on database {} on instance {}.'.format(
            database_id, instance_id
        )
    )


# [END spanner_postgresql_jsonb_add_column]


# [START spanner_postgresql_jsonb_update_data]
def update_data_with_jsonb(instance_id, database_id):
    """Writes JSONB values into the VenueDetails column of Venues.

    The `VenueDetails` column must be created before running this
    sample. Add it by running the `add_jsonb_column` sample, or with
    this DDL statement against your database:
      ALTER TABLE Venues ADD COLUMN VenueDetails JSONB
    """
    # instance_id = "your-spanner-instance"
    # database_id = "your-spanner-db-id"

    spanner_client = spanner.Client()
    database = spanner_client.instance(instance_id).database(database_id)

    # NOTE on PG JSONB semantics: duplicate keys keep the last value,
    # and keys are ordered first by length, then lexicographically
    # among keys of equal length.
    venue_details = [
        (
            4,
            JsonObject(
                [
                    JsonObject({"name": None, "open": True}),
                    JsonObject({"name": "room 2", "open": False}),
                ]
            ),
        ),
        (19, JsonObject(rating=9, open=True)),
        (
            42,
            JsonObject(
                {
                    "name": None,
                    "open": {"Monday": True, "Tuesday": False},
                    "tags": ["large", "airy"],
                }
            ),
        ),
    ]

    with database.batch() as batch:
        batch.update(
            table="Venues",
            columns=("VenueId", "VenueDetails"),
            values=venue_details,
        )

    print("Updated data.")


# [END spanner_postgresql_jsonb_update_data]

# [START spanner_postgresql_jsonb_query_parameter]
def query_data_with_jsonb_parameter(instance_id, database_id):
    """Queries sample data using SQL with a JSONB parameter."""
    # instance_id = "your-spanner-instance"
    # database_id = "your-spanner-db-id"

    spanner_client = spanner.Client()
    database = spanner_client.instance(instance_id).database(database_id)

    # The ->> operator extracts the 'rating' member as text; CAST makes
    # it comparable against the INT64-typed query parameter $1.
    sql = (
        "SELECT venueid, venuedetails FROM Venues"
        " WHERE CAST(venuedetails ->> 'rating' AS INTEGER) > $1"
    )
    params = {"p1": 2}
    types_of_params = {"p1": param_types.INT64}

    with database.snapshot() as snapshot:
        results = snapshot.execute_sql(
            sql,
            params=params,
            param_types=types_of_params,
        )

        for row in results:
            print("VenueId: {}, VenueDetails: {}".format(*row))


# [END spanner_postgresql_jsonb_query_parameter]


if __name__ == "__main__": # noqa: C901
parser = argparse.ArgumentParser(
description=__doc__,
Expand Down
22 changes: 22 additions & 0 deletions samples/samples/pg_snippets_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -449,3 +449,25 @@ def test_create_client_with_query_options(capsys, instance_id, sample_database):
assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out
assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out
assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out


@pytest.mark.dependency(name="add_jsonb_column", depends=["insert_datatypes_data"])
def test_add_jsonb_column(capsys, instance_id, sample_database):
    # Run the DDL sample, then check both the progress message and the
    # final confirmation appear on stdout.
    snippets.add_jsonb_column(instance_id, sample_database.database_id)
    captured, _ = capsys.readouterr()
    assert "Waiting for operation to complete..." in captured
    assert 'Altered table "Venues" on database ' in captured


@pytest.mark.dependency(name="update_data_with_jsonb", depends=["add_jsonb_column"])
def test_update_data_with_jsonb(capsys, instance_id, sample_database):
    # The JSONB column must exist first (see the add_jsonb_column dependency).
    snippets.update_data_with_jsonb(instance_id, sample_database.database_id)
    captured, _ = capsys.readouterr()
    assert "Updated data." in captured


@pytest.mark.dependency(depends=["update_data_with_jsonb"])
def test_query_data_with_jsonb_parameter(capsys, instance_id, sample_database):
    # Only venue 19 has a rating above the sample's threshold, so it is
    # the row expected in the output.
    snippets.query_data_with_jsonb_parameter(
        instance_id, sample_database.database_id
    )
    captured, _ = capsys.readouterr()
    assert "VenueId: 19, VenueDetails: {'open': True, 'rating': 9}" in captured
1 change: 1 addition & 0 deletions tests/_fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,7 @@
string_value VARCHAR(16),
timestamp_value TIMESTAMPTZ,
numeric_value NUMERIC,
jsonb_value JSONB,
PRIMARY KEY (pkey) );
CREATE TABLE counters (
name VARCHAR(1024),
Expand Down
18 changes: 16 additions & 2 deletions tests/system/test_session_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,7 @@
LIVE_ALL_TYPES_COLUMNS[:1]
+ LIVE_ALL_TYPES_COLUMNS[1:7:2]
+ LIVE_ALL_TYPES_COLUMNS[9:17:2]
+ ("jsonb_value",)
)

AllTypesRowData = collections.namedtuple("AllTypesRowData", LIVE_ALL_TYPES_COLUMNS)
Expand Down Expand Up @@ -120,7 +121,7 @@
AllTypesRowData(pkey=108, timestamp_value=NANO_TIME),
AllTypesRowData(pkey=109, numeric_value=NUMERIC_1),
AllTypesRowData(pkey=110, json_value=JSON_1),
AllTypesRowData(pkey=111, json_value=[JSON_1, JSON_2]),
AllTypesRowData(pkey=111, json_value=JsonObject([JSON_1, JSON_2])),
# empty array values
AllTypesRowData(pkey=201, int_array=[]),
AllTypesRowData(pkey=202, bool_array=[]),
Expand Down Expand Up @@ -184,12 +185,13 @@
PostGresAllTypesRowData(pkey=107, timestamp_value=SOME_TIME),
PostGresAllTypesRowData(pkey=108, timestamp_value=NANO_TIME),
PostGresAllTypesRowData(pkey=109, numeric_value=NUMERIC_1),
PostGresAllTypesRowData(pkey=110, jsonb_value=JSON_1),
)

if _helpers.USE_EMULATOR:
ALL_TYPES_COLUMNS = EMULATOR_ALL_TYPES_COLUMNS
ALL_TYPES_ROWDATA = EMULATOR_ALL_TYPES_ROWDATA
elif _helpers.DATABASE_DIALECT:
elif _helpers.DATABASE_DIALECT == "POSTGRESQL":
ALL_TYPES_COLUMNS = POSTGRES_ALL_TYPES_COLUMNS
ALL_TYPES_ROWDATA = POSTGRES_ALL_TYPES_ROWDATA
else:
Expand Down Expand Up @@ -2105,6 +2107,18 @@ def test_execute_sql_w_json_bindings(
)


def test_execute_sql_w_jsonb_bindings(
    not_emulator, not_google_standard_sql, sessions_database, database_dialect
):
    # Verifies that JSONB values can be bound as SQL query parameters
    # using the PG_JSONB param type, for both a scalar value and an
    # array of values. Skipped on the emulator and on GoogleSQL
    # databases (PG_JSONB is PostgreSQL-dialect only).
    _bind_test_helper(
        sessions_database,
        database_dialect,
        spanner_v1.param_types.PG_JSONB,
        JSON_1,
        [JSON_1, JSON_2],
    )


def test_execute_sql_w_query_param_struct(sessions_database, not_postgres):
name = "Phred"
count = 123
Expand Down
17 changes: 17 additions & 0 deletions tests/unit/test_param_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,3 +54,20 @@ def test_it(self):
)

self.assertEqual(found, expected)


class Test_JsonbParamType(unittest.TestCase):
    def test_it(self):
        # PG_JSONB must be a JSON-coded Type carrying the PG_JSONB
        # dialect annotation.
        from google.cloud.spanner_v1 import param_types
        from google.cloud.spanner_v1 import Type
        from google.cloud.spanner_v1 import TypeAnnotationCode
        from google.cloud.spanner_v1 import TypeCode

        expected = Type(
            code=TypeCode.JSON,
            type_annotation=TypeAnnotationCode(TypeAnnotationCode.PG_JSONB),
        )

        self.assertEqual(param_types.PG_JSONB, expected)

0 comments on commit 268924d

Please sign in to comment.