Separate row types to remove confusion around return types of 'row.commit'. (#8662)
Gurov Ilya authored and tseaver committed Jul 26, 2019
1 parent ab1d59e commit 82c2575
Showing 3 changed files with 206 additions and 44 deletions.
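
For context, a minimal sketch (not taken from the diff) of what the change means for callers; the "my-instance" and "my-table" IDs below are illustrative placeholders. Before this commit, the single Table.row() factory returned a DirectRow, AppendRow, or ConditionalRow depending on the flags passed, which made the return type of row.commit() easy to misjudge; the commit adds one explicit factory per row type instead and marks Table.row() for pending deprecation.

from google.cloud.bigtable import Client
from google.cloud.bigtable.row_filters import PassAllFilter

client = Client(admin=True)
table = client.instance("my-instance").table("my-table")

# Old style: one factory, the returned row type depends on the arguments.
direct = table.row(b"k1")                                    # DirectRow
append = table.row(b"k2", append=True)                       # AppendRow
conditional = table.row(b"k3", filter_=PassAllFilter(True))  # ConditionalRow

# New style (added in this commit): one factory per row type, so the type of
# each row object, and of its commit() result, is explicit at the call site.
direct = table.direct_row(b"k1")
append = table.append_row(b"k2")
conditional = table.conditional_row(b"k3", filter_=PassAllFilter(True))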
119 changes: 101 additions & 18 deletions bigtable/docs/snippets_table.py
@@ -403,11 +403,94 @@ def test_bigtable_table_row():
row2_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1)
row2_obj.commit()

-actual_rows_keys = []
+written_row_keys = []
for row in table.read_rows():
-actual_rows_keys.append(row.row_key)
+written_row_keys.append(row.row_key)

-assert actual_rows_keys == row_keys
+assert written_row_keys == row_keys

table.truncate(timeout=300)


def test_bigtable_table_append_row():
# [START bigtable_table_append_row]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
table = instance.table(TABLE_ID)

row_keys = [b"row_key_1", b"row_key_2"]
row1_obj = table.append_row(row_keys[0])
row2_obj = table.append_row(row_keys[1])
# [END bigtable_table_append_row]

row1_obj.append_cell_value(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1)
row1_obj.commit()
row2_obj.append_cell_value(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1)
row2_obj.commit()

written_row_keys = []
for row in table.read_rows():
written_row_keys.append(row.row_key)

assert written_row_keys == row_keys

table.truncate(timeout=300)


def test_bigtable_table_direct_row():
# [START bigtable_table_direct_row]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
table = instance.table(TABLE_ID)

row_keys = [b"row_key_1", b"row_key_2"]
row1_obj = table.direct_row(row_keys[0])
row2_obj = table.direct_row(row_keys[1])
# [END bigtable_table_direct_row]

row1_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1)
row1_obj.commit()
row2_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1)
row2_obj.commit()

written_row_keys = []
for row in table.read_rows():
written_row_keys.append(row.row_key)

assert written_row_keys == row_keys

table.truncate(timeout=300)


def test_bigtable_table_conditional_row():
# [START bigtable_table_conditional_row]
from google.cloud.bigtable import Client
from google.cloud.bigtable.row_filters import PassAllFilter

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
table = instance.table(TABLE_ID)

row_keys = [b"row_key_1", b"row_key_2"]
filter_ = PassAllFilter(True)
row1_obj = table.conditional_row(row_keys[0], filter_=filter_)
row2_obj = table.conditional_row(row_keys[1], filter_=filter_)
# [END bigtable_table_conditional_row]

row1_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1, state=False)
row1_obj.commit()
row2_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1, state=False)
row2_obj.commit()

written_row_keys = []
for row in table.read_rows():
written_row_keys.append(row.row_key)

assert written_row_keys == row_keys

table.truncate(timeout=300)

@@ -910,10 +993,10 @@ def test_bigtable_row_delete():
row_obj = table_row_del.row(b"row_key_1")
row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, b"cell-val")
row_obj.commit()
-actual_rows_keys = []
+written_row_keys = []
for row in table_row_del.read_rows():
-actual_rows_keys.append(row.row_key)
-assert actual_rows_keys == [b"row_key_1"]
+written_row_keys.append(row.row_key)
+assert written_row_keys == [b"row_key_1"]

# [START bigtable_row_delete]
from google.cloud.bigtable import Client
@@ -929,10 +1012,10 @@ def test_bigtable_row_delete():
row_obj.commit()
# [END bigtable_row_delete]

-actual_rows_keys = []
+written_row_keys = []
for row in table.read_rows():
-actual_rows_keys.append(row.row_key)
-assert len(actual_rows_keys) == 0
+written_row_keys.append(row.row_key)
+assert len(written_row_keys) == 0


def test_bigtable_row_delete_cell():
@@ -942,10 +1025,10 @@ def test_bigtable_row_delete_cell():
row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1)
row_obj.commit()

-actual_rows_keys = []
+written_row_keys = []
for row in table_row_del_cell.read_rows():
-actual_rows_keys.append(row.row_key)
-assert actual_rows_keys == [row_key1]
+written_row_keys.append(row.row_key)
+assert written_row_keys == [row_key1]

# [START bigtable_row_delete_cell]
from google.cloud.bigtable import Client
@@ -975,10 +1058,10 @@ def test_bigtable_row_delete_cells():
row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME2, CELL_VAL2)
row_obj.commit()

-actual_rows_keys = []
+written_row_keys = []
for row in table_row_del_cells.read_rows():
-actual_rows_keys.append(row.row_key)
-assert actual_rows_keys == [row_key1]
+written_row_keys.append(row.row_key)
+assert written_row_keys == [row_key1]

# [START bigtable_row_delete_cells]
from google.cloud.bigtable import Client
@@ -1075,11 +1158,11 @@ def test_bigtable_row_setcell_commit_rowkey():
row_obj.commit()
# [END bigtable_row_commit]

-actual_rows_keys = []
+written_row_keys = []
for row in table.read_rows():
-actual_rows_keys.append(row.row_key)
+written_row_keys.append(row.row_key)

-assert actual_rows_keys == [b"row_key_1", b"row_key_2"]
+assert written_row_keys == [b"row_key_1", b"row_key_2"]

# [START bigtable_row_row_key]
from google.cloud.bigtable import Client
62 changes: 62 additions & 0 deletions bigtable/google/cloud/bigtable/table.py
@@ -190,6 +190,14 @@ def row(self, row_key, filter_=None, append=False):
:raises: :class:`ValueError <exceptions.ValueError>` if both
``filter_`` and ``append`` are used.
"""
warnings.warn(
"This method will be deprecated in future versions. Please "
"use Table.append_row(), Table.conditional_row() "
"and Table.direct_row() methods instead.",
PendingDeprecationWarning,
stacklevel=2,
)

if append and filter_ is not None:
raise ValueError("At most one of filter_ and append can be set")
if append:
@@ -199,6 +207,60 @@
else:
return DirectRow(row_key, self)

def append_row(self, row_key):
"""Create a :class:`~google.cloud.bigtable.row.AppendRow` associated with this table.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_table_append_row]
:end-before: [END bigtable_table_append_row]
Args:
row_key (bytes): The key for the row being created.
Returns:
A row owned by this table.
"""
return AppendRow(row_key, self)

def direct_row(self, row_key):
"""Create a :class:`~google.cloud.bigtable.row.DirectRow` associated with this table.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_table_direct_row]
:end-before: [END bigtable_table_direct_row]
Args:
row_key (bytes): The key for the row being created.
Returns:
A row owned by this table.
"""
return DirectRow(row_key, self)

def conditional_row(self, row_key, filter_):
"""Create a :class:`~google.cloud.bigtable.row.ConditionalRow` associated with this table.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_table_conditional_row]
:end-before: [END bigtable_table_conditional_row]
Args:
row_key (bytes): The key for the row being created.
filter_ (:class:`.RowFilter`): Filter to be used for conditional
mutations. See :class:`.ConditionalRow` for more details.
Returns:
A row owned by this table.
"""
return ConditionalRow(row_key, self, filter_=filter_)

def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
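
The row() factory still works but now emits a PendingDeprecationWarning via the warnings.warn call above. A minimal sketch, not part of the diff, of how a caller or test might surface that warning; the instance and table IDs are again illustrative placeholders:

import warnings

from google.cloud.bigtable import Client

client = Client(admin=True)
table = client.instance("my-instance").table("my-table")

with warnings.catch_warnings(record=True) as caught:
    # PendingDeprecationWarning is ignored by default; record it explicitly.
    warnings.simplefilter("always")
    row = table.row(b"row_key_1")  # still returns a DirectRow

assert any(issubclass(w.category, PendingDeprecationWarning) for w in caught)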
69 changes: 43 additions & 26 deletions bigtable/tests/unit/test_table.py
@@ -180,16 +180,19 @@ def test_constructor_wo_admin(self):
self.assertIs(table._instance._client, client)
self.assertEqual(table.name, self.TABLE_NAME)

-def test_row_factory_direct(self):
-from google.cloud.bigtable.row import DirectRow
-
-credentials = _make_credentials()
+def _row_methods_helper(self):
client = self._make_client(
-project="project-id", credentials=credentials, admin=True
+project="project-id", credentials=_make_credentials(), admin=True
)
instance = client.instance(instance_id=self.INSTANCE_ID)
table = self._make_one(self.TABLE_ID, instance)
row_key = b"row_key"
+return table, row_key
+
+def test_row_factory_direct(self):
+from google.cloud.bigtable.row import DirectRow
+
+table, row_key = self._row_methods_helper()
row = table.row(row_key)

self.assertIsInstance(row, DirectRow)
@@ -199,13 +202,7 @@ def test_row_factory_direct(self):
def test_row_factory_conditional(self):
from google.cloud.bigtable.row import ConditionalRow

-credentials = _make_credentials()
-client = self._make_client(
-project="project-id", credentials=credentials, admin=True
-)
-instance = client.instance(instance_id=self.INSTANCE_ID)
-table = self._make_one(self.TABLE_ID, instance)
-row_key = b"row_key"
+table, row_key = self._row_methods_helper()
filter_ = object()
row = table.row(row_key, filter_=filter_)

@@ -216,28 +213,48 @@ def test_row_factory_append(self):
def test_row_factory_append(self):
from google.cloud.bigtable.row import AppendRow

-credentials = _make_credentials()
-client = self._make_client(
-project="project-id", credentials=credentials, admin=True
-)
-instance = client.instance(instance_id=self.INSTANCE_ID)
-table = self._make_one(self.TABLE_ID, instance)
-row_key = b"row_key"
+table, row_key = self._row_methods_helper()
row = table.row(row_key, append=True)

self.assertIsInstance(row, AppendRow)
self.assertEqual(row._row_key, row_key)
self.assertEqual(row._table, table)

def test_direct_row(self):
from google.cloud.bigtable.row import DirectRow

table, row_key = self._row_methods_helper()
row = table.direct_row(row_key)

self.assertIsInstance(row, DirectRow)
self.assertEqual(row._row_key, row_key)
self.assertEqual(row._table, table)

def test_conditional_row(self):
from google.cloud.bigtable.row import ConditionalRow

table, row_key = self._row_methods_helper()
filter_ = object()
row = table.conditional_row(row_key, filter_=filter_)

self.assertIsInstance(row, ConditionalRow)
self.assertEqual(row._row_key, row_key)
self.assertEqual(row._table, table)

def test_append_row(self):
from google.cloud.bigtable.row import AppendRow

table, row_key = self._row_methods_helper()
row = table.append_row(row_key)

self.assertIsInstance(row, AppendRow)
self.assertEqual(row._row_key, row_key)
self.assertEqual(row._table, table)

def test_row_factory_failure(self):
-credentials = _make_credentials()
-client = self._make_client(
-project="project-id", credentials=credentials, admin=True
-)
-instance = client.instance(instance_id=self.INSTANCE_ID)
-table = self._make_one(self.TABLE_ID, instance)
+table, row_key = self._row_methods_helper()
with self.assertRaises(ValueError):
-table.row(b"row_key", filter_=object(), append=True)
+table.row(row_key, filter_=object(), append=True)

def test___eq__(self):
credentials = _make_credentials()
